/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#define LOG_TAG "V4L2Camera"
#include <utils/Log.h>

extern "C" {
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include "uvc_compat.h"
#include "v4l2_formats.h"
};

#include "V4L2Camera.h"
#include "Utils.h"
#include "Converter.h"

#define HEADERFRAME1 0xaf

//#define DEBUG_FRAME 0

#ifdef DEBUG_FRAME
#define LOG_FRAME ALOGD
#else
#define LOG_FRAME ALOGV
#endif

namespace android {

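/*
 * Rough lifecycle of this class as driven by the camera HAL above it
 * (a sketch of the call order implied by this file, not a normative API;
 * the device path and buffer names below are only illustrative):
 *
 *   V4L2Camera cam;
 *   if (cam.Open("/dev/video0") == 0 &&   // query caps, enumerate formats
 *       cam.Init(640, 480, 30) == 0) {    // negotiate format, mmap buffers
 *       cam.StartStreaming();
 *       // per frame: cam.GrabRawFrame(buf, bufSize);  // always delivers YUYV
 *       cam.StopStreaming();
 *       cam.Uninit();
 *   }
 *   cam.Close();
 */
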
V4L2Camera::V4L2Camera ()
    : fd(-1), nQueued(0), nDequeued(0)
{
    videoIn = (struct vdIn *) calloc (1, sizeof (struct vdIn));
}

V4L2Camera::~V4L2Camera()
{
    Close();
    free(videoIn);
}

int V4L2Camera::Open (const char *device)
{
    int ret;

    /* Close the previous instance, if any */
    Close();

    memset(videoIn, 0, sizeof (struct vdIn));

    if ((fd = open(device, O_RDWR)) == -1) {
        ALOGE("ERROR opening V4L interface %s: %s", device, strerror(errno));
        return -1;
    }
    ALOGD("Open %s OK", device);

    ret = ioctl (fd, VIDIOC_QUERYCAP, &videoIn->cap);
    if (ret < 0) {
        ALOGE("Error opening device: unable to query device.");
        return -1;
    }

    if ((videoIn->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        ALOGE("Error opening device: video capture not supported.");
        return -1;
    }

    if (!(videoIn->cap.capabilities & V4L2_CAP_STREAMING)) {
        ALOGE("Capture device does not support streaming i/o");
        return -1;
    }

    /* Enumerate all available frame formats */
    EnumFrameFormats();

    return ret;
}

void V4L2Camera::Close ()
{
    /* Release the temporary buffer, if any */
    if (videoIn->tmpBuffer)
        free(videoIn->tmpBuffer);
    videoIn->tmpBuffer = NULL;

    /* Close the file descriptor */
    if (fd > 0)
        close(fd);
    fd = -1;
}

static int my_abs(int x)
{
    return (x < 0) ? -x : x;
}

int V4L2Camera::Init(int width, int height, int fps)
{
    ALOGD("V4L2Camera::Init(%d, %d, %d)", width, height, fps);

    /* Initialize the capture to the specified width and height */
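    /*
     * Capture pixel formats this code can convert to YUYV, in decreasing
     * order of preference. bpp is the bytes-per-pixel value used for crop
     * offset arithmetic (0 for formats that are never cropped), isplanar
     * flags planar layouts, and allowscrop marks formats that may still be
     * selected when the driver cannot deliver the exact requested size and
     * the captured image has to be cropped.
     */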
    static const struct {
        int fmt;            /* PixelFormat */
        int bpp;            /* bytes per pixel */
        int isplanar;       /* If format is planar or not */
        int allowscrop;     /* If we support cropping with this pixel format */
    } pixFmtsOrder[] = {
        {V4L2_PIX_FMT_YUYV,    2,0,1},
        {V4L2_PIX_FMT_YVYU,    2,0,1},
        {V4L2_PIX_FMT_UYVY,    2,0,1},
        {V4L2_PIX_FMT_YYUV,    2,0,1},
        {V4L2_PIX_FMT_SPCA501, 2,0,0},
        {V4L2_PIX_FMT_SPCA505, 2,0,0},
        {V4L2_PIX_FMT_SPCA508, 2,0,0},
        {V4L2_PIX_FMT_YUV420,  0,1,0},
        {V4L2_PIX_FMT_YVU420,  0,1,0},
        {V4L2_PIX_FMT_NV12,    0,1,0},
        {V4L2_PIX_FMT_NV21,    0,1,0},
        {V4L2_PIX_FMT_NV16,    0,1,0},
        {V4L2_PIX_FMT_NV61,    0,1,0},
        {V4L2_PIX_FMT_Y41P,    0,0,0},
        {V4L2_PIX_FMT_SGBRG8,  0,0,0},
        {V4L2_PIX_FMT_SGRBG8,  0,0,0},
        {V4L2_PIX_FMT_SBGGR8,  0,0,0},
        {V4L2_PIX_FMT_SRGGB8,  0,0,0},
        {V4L2_PIX_FMT_BGR24,   3,0,1},
        {V4L2_PIX_FMT_RGB24,   3,0,1},
        {V4L2_PIX_FMT_MJPEG,   0,1,0},
        {V4L2_PIX_FMT_JPEG,    0,1,0},
        {V4L2_PIX_FMT_GREY,    1,0,1},
        {V4L2_PIX_FMT_Y16,     2,0,1},
    };

    int ret;

    // If no formats, break here
    if (m_AllFmts.isEmpty()) {
        ALOGE("No video formats available");
        return -1;
    }

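    /*
     * Pick the smallest enumerated mode that still covers the requested
     * width and height; among candidates of equal area, prefer the one
     * whose frame rate is closest to the requested fps. Smaller modes are
     * never chosen, so any remaining mismatch is handled by cropping.
     */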
    // Try to get the closest match ...
    SurfaceDesc closest;
    int closestDArea = -1;
    int closestDFps = -1;
    unsigned int i;
    int area = width * height;
    for (i = 0; i < m_AllFmts.size(); i++) {
        SurfaceDesc sd = m_AllFmts[i];

        // Always choose a bigger or equal surface
        if (sd.getWidth() >= width &&
            sd.getHeight() >= height) {

            int difArea = sd.getArea() - area;
            int difFps = my_abs(sd.getFps() - fps);

            ALOGD("Trying format: (%d x %d), Fps: %d [difArea:%d, difFps:%d, cDifArea:%d, cDifFps:%d]",
                sd.getWidth(), sd.getHeight(), sd.getFps(), difArea, difFps, closestDArea, closestDFps);
            if (closestDArea < 0 ||
                difArea < closestDArea ||
                (difArea == closestDArea && difFps < closestDFps)) {

                // Store approximation
                closestDArea = difArea;
                closestDFps = difFps;

                // And the new surface descriptor
                closest = sd;
            }
        }
    }

    if (closestDArea == -1) {
        ALOGE("Size not available: (%d x %d)", width, height);
        return -1;
    }

    ALOGD("Selected format: (%d x %d), Fps: %d", closest.getWidth(), closest.getHeight(), closest.getFps());

    // Check if we will have to crop the captured image
    bool crop = width != closest.getWidth() || height != closest.getHeight();

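    /*
     * Walk the pixel format preference table and use VIDIOC_TRY_FMT to find
     * the first format the driver accepts at exactly the chosen resolution.
     * When cropping will be needed, formats flagged as non-croppable are
     * skipped entirely.
     */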
    // Iterate through pixel formats from best to worst
    ret = -1;
    for (i = 0; i < (sizeof(pixFmtsOrder) / sizeof(pixFmtsOrder[0])); i++) {

        // If we will need to crop, make sure to only select formats we can crop...
        if (!crop || pixFmtsOrder[i].allowscrop) {

            memset(&videoIn->format, 0, sizeof(videoIn->format));
            videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            videoIn->format.fmt.pix.width = closest.getWidth();
            videoIn->format.fmt.pix.height = closest.getHeight();
            videoIn->format.fmt.pix.pixelformat = pixFmtsOrder[i].fmt;

            ret = ioctl(fd, VIDIOC_TRY_FMT, &videoIn->format);
            if (ret >= 0 &&
                videoIn->format.fmt.pix.width == (uint)closest.getWidth() &&
                videoIn->format.fmt.pix.height == (uint)closest.getHeight()) {
                break;
            }
        }
    }
    if (ret < 0) {
        ALOGE("Init: VIDIOC_TRY_FMT Failed: %s", strerror(errno));
        return ret;
    }

    /* Set the format */
    memset(&videoIn->format, 0, sizeof(videoIn->format));
    videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->format.fmt.pix.width = closest.getWidth();
    videoIn->format.fmt.pix.height = closest.getHeight();
    videoIn->format.fmt.pix.pixelformat = pixFmtsOrder[i].fmt;
    ret = ioctl(fd, VIDIOC_S_FMT, &videoIn->format);
    if (ret < 0) {
        ALOGE("Init: VIDIOC_S_FMT Failed: %s", strerror(errno));
        return ret;
    }

    /* Query for the effective video format used */
    memset(&videoIn->format, 0, sizeof(videoIn->format));
    videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_G_FMT, &videoIn->format);
    if (ret < 0) {
        ALOGE("Init: VIDIOC_G_FMT Failed: %s", strerror(errno));
        return ret;
    }

    /* Note VIDIOC_S_FMT may change width and height. */

    /* Buggy driver paranoia. */
    unsigned int min = videoIn->format.fmt.pix.width * 2;
    if (videoIn->format.fmt.pix.bytesperline < min)
        videoIn->format.fmt.pix.bytesperline = min;
    min = videoIn->format.fmt.pix.bytesperline * videoIn->format.fmt.pix.height;
    if (videoIn->format.fmt.pix.sizeimage < min)
        videoIn->format.fmt.pix.sizeimage = min;

    /* Store the pixel formats we will use */
    videoIn->outWidth = width;
    videoIn->outHeight = height;
    videoIn->outFrameSize = width * height << 1; // Calculate the expected output framesize in YUYV
    videoIn->capBytesPerPixel = pixFmtsOrder[i].bpp;

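    /*
     * Cropping is implemented as a plain offset into each captured frame:
     * the requested output rectangle is centered inside the capture size,
     * and the start offsets are rounded down to even values so packed 4:2:2
     * formats keep their U/V pixel pairing intact.
     */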
    /* Now calculate cropping margins, if needed, rounding to even */
    int startX = ((closest.getWidth() - width) >> 1) & (-2);
    int startY = ((closest.getHeight() - height) >> 1) & (-2);

    /* Avoid crashing if the mode found is smaller than the requested */
    if (startX < 0) {
        videoIn->outWidth += startX;
        startX = 0;
    }
    if (startY < 0) {
        videoIn->outHeight += startY;
        startY = 0;
    }

    /* Calculate the starting offset into each captured frame */
    videoIn->capCropOffset = (startX * videoIn->capBytesPerPixel) +
        (videoIn->format.fmt.pix.bytesperline * startY);

    ALOGI("Cropping from origin: %dx%d - size: %dx%d (offset:%d)",
        startX, startY,
        videoIn->outWidth, videoIn->outHeight,
        videoIn->capCropOffset);

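    /*
     * V4L2 expresses frame rate as a frame interval (numerator/denominator
     * seconds per frame), so 1/fps below requests `fps` frames per second.
     * Drivers may adjust the value, which is why it is read back with
     * VIDIOC_G_PARM afterwards and treated as a maximum.
     */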
    /* sets video device frame rate */
    memset(&videoIn->params, 0, sizeof(videoIn->params));
    videoIn->params.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->params.parm.capture.timeperframe.numerator = 1;
    videoIn->params.parm.capture.timeperframe.denominator = closest.getFps();

    /* Set the framerate. If it fails, it won't be fatal */
    if (ioctl(fd, VIDIOC_S_PARM, &videoIn->params) < 0) {
        ALOGE("VIDIOC_S_PARM error: Unable to set %d fps", closest.getFps());
    }

    /* Gets video device defined frame rate (not real - consider it a maximum value) */
    if (ioctl(fd, VIDIOC_G_PARM, &videoIn->params) < 0) {
        ALOGE("VIDIOC_G_PARM - Unable to get timeperframe");
    }

    ALOGI("Actual format: (%d x %d), Fps: %d, pixfmt: '%c%c%c%c', bytesperline: %d",
        videoIn->format.fmt.pix.width,
        videoIn->format.fmt.pix.height,
        videoIn->params.parm.capture.timeperframe.denominator,
        videoIn->format.fmt.pix.pixelformat & 0xFF, (videoIn->format.fmt.pix.pixelformat >> 8) & 0xFF,
        (videoIn->format.fmt.pix.pixelformat >> 16) & 0xFF, (videoIn->format.fmt.pix.pixelformat >> 24) & 0xFF,
        videoIn->format.fmt.pix.bytesperline);

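    /*
     * For (M)JPEG streams, try to push the in-camera compression quality to
     * its maximum. Drivers that do not implement the JPEG compression
     * controls report EINVAL, in which case quality is marked as -1
     * (unsupported) and capture continues unaffected.
     */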
    /* Configure JPEG quality, if dealing with those formats */
    if (videoIn->format.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG ||
        videoIn->format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {

        /* Get the compression format */
        ioctl(fd, VIDIOC_G_JPEGCOMP, &videoIn->jpegcomp);

        /* Set to maximum */
        videoIn->jpegcomp.quality = 100;

        /* Try to set it */
        if (ioctl(fd, VIDIOC_S_JPEGCOMP, &videoIn->jpegcomp) < 0)
        {
            ALOGE("VIDIOC_S_COMP:");
            if (errno == EINVAL)
            {
                videoIn->jpegcomp.quality = -1; //not supported
                ALOGE(" compression control not supported\n");
            }
        }

        /* gets video stream jpeg compression parameters */
        if (ioctl(fd, VIDIOC_G_JPEGCOMP, &videoIn->jpegcomp) >= 0) {
            ALOGD("VIDIOC_G_COMP:\n");
            ALOGD(" quality: %i\n", videoIn->jpegcomp.quality);
            ALOGD(" APPn: %i\n", videoIn->jpegcomp.APPn);
            ALOGD(" APP_len: %i\n", videoIn->jpegcomp.APP_len);
            ALOGD(" APP_data: %s\n", videoIn->jpegcomp.APP_data);
            ALOGD(" COM_len: %i\n", videoIn->jpegcomp.COM_len);
            ALOGD(" COM_data: %s\n", videoIn->jpegcomp.COM_data);
            ALOGD(" jpeg_markers: 0x%x\n", videoIn->jpegcomp.jpeg_markers);
        } else {
            ALOGE("VIDIOC_G_COMP:");
            if (errno == EINVAL) {
                videoIn->jpegcomp.quality = -1; //not supported
                ALOGE(" compression control not supported\n");
            }
        }
    }

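    /*
     * Memory-mapped streaming I/O: request NB_BUFFER capture buffers from
     * the driver, mmap each one into this process, and pre-queue them all
     * so the driver can start filling frames as soon as streaming is
     * turned on.
     */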
    /* Check if camera can handle NB_BUFFER buffers */
    memset(&videoIn->rb, 0, sizeof(videoIn->rb));
    videoIn->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->rb.memory = V4L2_MEMORY_MMAP;
    videoIn->rb.count = NB_BUFFER;

    ret = ioctl(fd, VIDIOC_REQBUFS, &videoIn->rb);
    if (ret < 0) {
        ALOGE("Init: VIDIOC_REQBUFS failed: %s", strerror(errno));
        return ret;
    }

    for (int i = 0; i < NB_BUFFER; i++) {

        memset (&videoIn->buf, 0, sizeof (struct v4l2_buffer));
        videoIn->buf.index = i;
        videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        videoIn->buf.memory = V4L2_MEMORY_MMAP;

        ret = ioctl (fd, VIDIOC_QUERYBUF, &videoIn->buf);
        if (ret < 0) {
            ALOGE("Init: Unable to query buffer (%s)", strerror(errno));
            return ret;
        }

        videoIn->mem[i] = mmap (0,
            videoIn->buf.length,
            PROT_READ | PROT_WRITE,
            MAP_SHARED,
            fd,
            videoIn->buf.m.offset);

        if (videoIn->mem[i] == MAP_FAILED) {
            ALOGE("Init: Unable to map buffer (%s)", strerror(errno));
            return -1;
        }

        ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
        if (ret < 0) {
            ALOGE("Init: VIDIOC_QBUF Failed");
            return -1;
        }

        nQueued++;
    }

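    /*
     * Only the raw Bayer formats need an intermediate buffer here: they are
     * expanded to RGB24 first and converted to YUYV at grab time, so a
     * width * height * 3 byte scratch buffer is allocated for that step.
     */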
    // Reserve temporary buffers, if they will be needed
    size_t tmpbuf_size = 0;
    switch (videoIn->format.fmt.pix.pixelformat)
    {
    case V4L2_PIX_FMT_JPEG:
    case V4L2_PIX_FMT_MJPEG:
    case V4L2_PIX_FMT_UYVY:
    case V4L2_PIX_FMT_YVYU:
    case V4L2_PIX_FMT_YYUV:
    case V4L2_PIX_FMT_YUV420: // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
    case V4L2_PIX_FMT_YVU420: // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
    case V4L2_PIX_FMT_Y41P:   // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
    case V4L2_PIX_FMT_NV12:
    case V4L2_PIX_FMT_NV21:
    case V4L2_PIX_FMT_NV16:
    case V4L2_PIX_FMT_NV61:
    case V4L2_PIX_FMT_SPCA501:
    case V4L2_PIX_FMT_SPCA505:
    case V4L2_PIX_FMT_SPCA508:
    case V4L2_PIX_FMT_GREY:
    case V4L2_PIX_FMT_Y16:

    case V4L2_PIX_FMT_YUYV:
        // YUYV doesn't need a temp buffer but we will set it if/when
        // video processing disable control is checked (bayer processing).
        // (logitech cameras only)
        break;

    case V4L2_PIX_FMT_SGBRG8: //0
    case V4L2_PIX_FMT_SGRBG8: //1
    case V4L2_PIX_FMT_SBGGR8: //2
    case V4L2_PIX_FMT_SRGGB8: //3
        // Raw 8 bit bayer
        // when grabbing use:
        //   bayer_to_rgb24(bayer_data, RGB24_data, width, height, 0..3)
        //   rgb2yuyv(RGB24_data, pFrameBuffer, width, height)

        // alloc a temp buffer for converting to YUYV
        // rgb buffer for decoding bayer data
        tmpbuf_size = videoIn->format.fmt.pix.width * videoIn->format.fmt.pix.height * 3;
        if (videoIn->tmpBuffer)
            free(videoIn->tmpBuffer);
        videoIn->tmpBuffer = (uint8_t*)calloc(1, tmpbuf_size);
        if (!videoIn->tmpBuffer) {
            ALOGE("couldn't calloc %lu bytes of memory for frame buffer\n",
                (unsigned long) tmpbuf_size);
            return -ENOMEM;
        }

        break;

    case V4L2_PIX_FMT_RGB24: //rgb or bgr (8-8-8)
    case V4L2_PIX_FMT_BGR24:
        break;

    default:
        ALOGE("Should never arrive (1)- exit fatal !!\n");
        return -1;
    }

    return 0;
}

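/*
 * Uninit undoes the buffer setup performed in Init: it dequeues buffers the
 * application still holds, unmaps the NB_BUFFER mmap()ed regions and frees
 * the temporary conversion buffer. The device node itself stays open until
 * Close() is called.
 */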
void V4L2Camera::Uninit ()
{
    int ret;

    memset(&videoIn->buf, 0, sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;

    /* Dequeue everything */
    int DQcount = nQueued - nDequeued;

    for (int i = 0; i < DQcount - 1; i++) {
        ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
        ALOGE_IF(ret < 0, "Uninit: VIDIOC_DQBUF Failed");
    }
    nQueued = 0;
    nDequeued = 0;

    /* Unmap buffers */
    for (int i = 0; i < NB_BUFFER; i++)
        if (videoIn->mem[i] != NULL) {
            ret = munmap(videoIn->mem[i], videoIn->buf.length);
            ALOGE_IF(ret < 0, "Uninit: Unmap failed");
            videoIn->mem[i] = NULL;
        }

    if (videoIn->tmpBuffer)
        free(videoIn->tmpBuffer);
    videoIn->tmpBuffer = NULL;

}

int V4L2Camera::StartStreaming ()
{
    enum v4l2_buf_type type;
    int ret;

    if (!videoIn->isStreaming) {
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        ret = ioctl (fd, VIDIOC_STREAMON, &type);
        if (ret < 0) {
            ALOGE("StartStreaming: Unable to start capture: %s", strerror(errno));
            return ret;
        }

        videoIn->isStreaming = true;
    }

    return 0;
}

int V4L2Camera::StopStreaming ()
{
    enum v4l2_buf_type type;
    int ret;

    if (videoIn->isStreaming) {
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        ret = ioctl (fd, VIDIOC_STREAMOFF, &type);
        if (ret < 0) {
            ALOGE("StopStreaming: Unable to stop capture: %s", strerror(errno));
            return ret;
        }

        videoIn->isStreaming = false;
    }

    return 0;
}

/* Returns the effective capture size */
void V4L2Camera::getSize(int& width, int& height) const
{
    width = videoIn->outWidth;
    height = videoIn->outHeight;
}

/* Returns the effective fps */
int V4L2Camera::getFps() const
{
    return videoIn->params.parm.capture.timeperframe.denominator;
}

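/*
 * GrabRawFrame dequeues one filled capture buffer, converts it from the
 * negotiated capture pixel format into packed YUYV at the requested output
 * size (honoring the crop offset computed in Init), writes the result into
 * the caller-supplied buffer and re-queues the V4L2 buffer afterwards.
 */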
/* Grab frame in YUYV mode */
void V4L2Camera::GrabRawFrame (void *frameBuffer, int maxSize)
{
    LOG_FRAME("V4L2Camera::GrabRawFrame: frameBuffer:%p, len:%d", frameBuffer, maxSize);
    int ret;

    /* DQ */
    memset(&videoIn->buf, 0, sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        ALOGE("GrabRawFrame: VIDIOC_DQBUF Failed");
        return;
    }

    nDequeued++;

    // Calculate the stride of the output image (YUYV) in bytes
    int strideOut = videoIn->outWidth << 1;

    // And the pointer to the start of the image
    uint8_t* src = (uint8_t*)videoIn->mem[videoIn->buf.index] + videoIn->capCropOffset;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Got Raw frame (%dx%d) (buf:%d@0x%p, len:%d)",
        videoIn->format.fmt.pix.width, videoIn->format.fmt.pix.height,
        videoIn->buf.index, src, videoIn->buf.bytesused);

    /* Avoid crashing! - Make sure there is enough room in the output buffer! */
    if (maxSize < videoIn->outFrameSize) {

        ALOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",
            videoIn->outFrameSize, maxSize);

    } else {

        switch (videoIn->format.fmt.pix.pixelformat)
        {
        case V4L2_PIX_FMT_JPEG:
        case V4L2_PIX_FMT_MJPEG:
            if (videoIn->buf.bytesused <= HEADERFRAME1) {
                // Prevent crash on empty image
                ALOGE("Ignoring empty buffer ...\n");
                break;
            }

            if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
                ALOGE("jpeg decode errors\n");
                break;
            }
            break;

        case V4L2_PIX_FMT_UYVY:
            uyvy_to_yuyv((uint8_t*)frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_YVYU:
            yvyu_to_yuyv((uint8_t*)frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_YYUV:
            yyuv_to_yuyv((uint8_t*)frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_YUV420:
            yuv420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_YVU420:
            yvu420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_NV12:
            nv12_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_NV21:
            nv21_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_NV16:
            nv16_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_NV61:
            nv61_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_Y41P:
            y41p_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_GREY:
            grey_to_yuyv((uint8_t*)frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_Y16:
            y16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SPCA501:
            s501_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SPCA505:
            s505_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SPCA508:
            s508_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_YUYV:
            {
                int h;
                uint8_t* pdst = (uint8_t*)frameBuffer;
                uint8_t* psrc = src;
                int ss = videoIn->outWidth << 1;
                for (h = 0; h < videoIn->outHeight; h++) {
                    memcpy(pdst, psrc, ss);
                    pdst += strideOut;
                    psrc += videoIn->format.fmt.pix.bytesperline;
                }
            }
            break;

        case V4L2_PIX_FMT_SGBRG8: //0
            bayer_to_rgb24 (src, (uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 0);
            rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SGRBG8: //1
            bayer_to_rgb24 (src, (uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 1);
            rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SBGGR8: //2
            bayer_to_rgb24 (src, (uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 2);
            rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_SRGGB8: //3
            bayer_to_rgb24 (src, (uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 3);
            rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_RGB24:
            rgb_to_yuyv((uint8_t*) frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        case V4L2_PIX_FMT_BGR24:
            bgr_to_yuyv((uint8_t*) frameBuffer, strideOut,
                src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
            break;

        default:
            ALOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
            break;
        }

        LOG_FRAME("V4L2Camera::GrabRawFrame - Copied frame to destination 0x%p", frameBuffer);
    }

    /* And Queue the buffer again */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        ALOGE("GrabRawFrame: VIDIOC_QBUF Failed");
        return;
    }

    nQueued++;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Queued buffer");

}

/* enumerate frame intervals (fps)
 * args:
 *   pixfmt: v4l2 pixel format that we want to list frame intervals for
 *   width:  video width that we want to list frame intervals for
 *   height: video height that we want to list frame intervals for
 *
 * returns true; every discovered (size, fps) combination is added to m_AllFmts */
bool V4L2Camera::EnumFrameIntervals(int pixfmt, int width, int height)
{
    ALOGD("V4L2Camera::EnumFrameIntervals: pixfmt: 0x%08x, w:%d, h:%d", pixfmt, width, height);

    struct v4l2_frmivalenum fival;
    int list_fps = 0;
    memset(&fival, 0, sizeof(fival));
    fival.index = 0;
    fival.pixel_format = pixfmt;
    fival.width = width;
    fival.height = height;

    ALOGD("\tTime interval between frame: ");
    while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0)
    {
        fival.index++;
        if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
            ALOGD("%u/%u", fival.discrete.numerator, fival.discrete.denominator);

            m_AllFmts.add( SurfaceDesc( width, height, fival.discrete.denominator ) );
            list_fps++;
        } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
            ALOGD("{min { %u/%u } .. max { %u/%u } }",
                fival.stepwise.min.numerator, fival.stepwise.min.denominator,
                fival.stepwise.max.numerator, fival.stepwise.max.denominator);
            break;
        } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
            ALOGD("{min { %u/%u } .. max { %u/%u } / "
                "stepsize { %u/%u } }",
                fival.stepwise.min.numerator, fival.stepwise.min.denominator,
                fival.stepwise.max.numerator, fival.stepwise.max.denominator,
                fival.stepwise.step.numerator, fival.stepwise.step.denominator);
            break;
        }
    }

    // Assume at least 1fps
    if (list_fps == 0) {
        m_AllFmts.add( SurfaceDesc( width, height, 1 ) );
    }

    return true;
}

/* enumerate frame sizes
 * pixfmt: v4l2 pixel format that we want to list frame sizes for
 *
 * returns true; every discovered size (with its frame intervals) is added to m_AllFmts */
bool V4L2Camera::EnumFrameSizes(int pixfmt)
{
    ALOGD("V4L2Camera::EnumFrameSizes: pixfmt: 0x%08x", pixfmt);
    int fsizeind = 0;
    struct v4l2_frmsizeenum fsize;

    memset(&fsize, 0, sizeof(fsize));
    fsize.index = 0;
    fsize.pixel_format = pixfmt;
    while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) >= 0) {
        fsize.index++;
        if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
            ALOGD("{ discrete: width = %u, height = %u }",
                fsize.discrete.width, fsize.discrete.height);

            fsizeind++;

            if (!EnumFrameIntervals(pixfmt, fsize.discrete.width, fsize.discrete.height))
                ALOGD(" Unable to enumerate frame intervals");
        } else if (fsize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
            ALOGD("{ continuous: min { width = %u, height = %u } .. "
                "max { width = %u, height = %u } }",
                fsize.stepwise.min_width, fsize.stepwise.min_height,
                fsize.stepwise.max_width, fsize.stepwise.max_height);
            ALOGD(" will not enumerate frame intervals.\n");
        } else if (fsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
            ALOGD("{ stepwise: min { width = %u, height = %u } .. "
                "max { width = %u, height = %u } / "
                "stepsize { width = %u, height = %u } }",
                fsize.stepwise.min_width, fsize.stepwise.min_height,
                fsize.stepwise.max_width, fsize.stepwise.max_height,
                fsize.stepwise.step_width, fsize.stepwise.step_height);
            ALOGD(" will not enumerate frame intervals.");
        } else {
            ALOGE(" fsize.type not supported: %d\n", fsize.type);
            ALOGE(" (Discrete: %d Continuous: %d Stepwise: %d)",
                V4L2_FRMSIZE_TYPE_DISCRETE,
                V4L2_FRMSIZE_TYPE_CONTINUOUS,
                V4L2_FRMSIZE_TYPE_STEPWISE);
        }
    }

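    /*
     * Some drivers (gspca in particular) do not implement
     * VIDIOC_ENUM_FRAMESIZES. In that case, probe a fixed list of common
     * resolutions with VIDIOC_TRY_FMT and register whatever the driver
     * accepts, assuming a nominal 25 fps for those modes.
     */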
    if (fsizeind == 0) {
        /* ------ gspca doesn't enumerate frame sizes ------ */
        /* negotiate with VIDIOC_TRY_FMT instead */
        static const struct {
            int w, h;
        } defMode[] = {
            {800, 600},
            {768, 576},
            {768, 480},
            {720, 576},
            {720, 480},
            {704, 576},
            {704, 480},
            {640, 480},
            {352, 288},
            {320, 240}
        };

        unsigned int i;
        for (i = 0 ; i < (sizeof(defMode) / sizeof(defMode[0])); i++) {

            fsizeind++;
844 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
845 fmt.fmt.pix.width = defMode[i].w;
846 fmt.fmt.pix.height = defMode[i].h;
847 fmt.fmt.pix.pixelformat = pixfmt;
848 fmt.fmt.pix.field = V4L2_FIELD_ANY;
849
850 if (ioctl(fd,VIDIOC_TRY_FMT, &fmt) >= 0) {
Chih-Wei Huangdf9613f2013-02-20 15:46:19 +0800851 ALOGD("{ ?GSPCA? : width = %u, height = %u }\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
Chih-Wei Huang0d92eda2012-02-07 16:55:49 +0800852
853 // Add the mode descriptor
854 m_AllFmts.add( SurfaceDesc( fmt.fmt.pix.width, fmt.fmt.pix.height, 25 ) );
855 }
856 }
857 }
858
859 return true;
860}
861
/* enumerate frames (formats, sizes and fps)
 *
 * fills m_AllFmts with every format/size/fps combination reported by the
 * device and then derives m_BestPreviewFmt and m_BestPictureFmt from it;
 * always returns true */
bool V4L2Camera::EnumFrameFormats()
{
    ALOGD("V4L2Camera::EnumFrameFormats");
    struct v4l2_fmtdesc fmt;

    // Start with no modes
    m_AllFmts.clear();

    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) >= 0) {
        fmt.index++;
        ALOGD("{ pixelformat = '%c%c%c%c', description = '%s' }",
            fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
            (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
            fmt.description);

        // enumerate frame sizes for this pixel format
        if (!EnumFrameSizes(fmt.pixelformat)) {
            ALOGE(" Unable to enumerate frame sizes.");
        }
    };

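    /*
     * With every (size, fps) pair collected in m_AllFmts, derive the two
     * working defaults: picture capture favors the largest frame (lowest
     * fps as a tie-breaker), preview favors the highest frame rate (largest
     * frame as a tie-breaker).
     */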
    // Now, select the best preview format and the best PictureFormat
    m_BestPreviewFmt = SurfaceDesc();
    m_BestPictureFmt = SurfaceDesc();

    unsigned int i;
    for (i = 0; i < m_AllFmts.size(); i++) {
        SurfaceDesc s = m_AllFmts[i];

        // Prioritize size over everything else when taking pictures. Use the
        // least fps possible, as that usually means better quality
        if ((s.getSize() > m_BestPictureFmt.getSize()) ||
            (s.getSize() == m_BestPictureFmt.getSize() && s.getFps() < m_BestPictureFmt.getFps())
           ) {
            m_BestPictureFmt = s;
        }

        // Prioritize fps, then size when doing preview
        if ((s.getFps() > m_BestPreviewFmt.getFps()) ||
            (s.getFps() == m_BestPreviewFmt.getFps() && s.getSize() > m_BestPreviewFmt.getSize())
           ) {
            m_BestPreviewFmt = s;
        }

    }

    return true;
}

SortedVector<SurfaceSize> V4L2Camera::getAvailableSizes() const
{
    ALOGD("V4L2Camera::getAvailableSizes");
    SortedVector<SurfaceSize> ret;

    // Iterate through the list. All duplicated entries will be removed
    unsigned int i;
    for (i = 0; i < m_AllFmts.size(); i++) {
        ret.add(m_AllFmts[i].getSize());
    }
    return ret;
}


SortedVector<int> V4L2Camera::getAvailableFps() const
{
    ALOGD("V4L2Camera::getAvailableFps");
    SortedVector<int> ret;

    // Iterate through the list. All duplicated entries will be removed
    unsigned int i;
    for (i = 0; i < m_AllFmts.size(); i++) {
        ret.add(m_AllFmts[i].getFps());
    }
    return ret;

}

const SurfaceDesc& V4L2Camera::getBestPreviewFmt() const
{
    return m_BestPreviewFmt;
}

const SurfaceDesc& V4L2Camera::getBestPictureFmt() const
{
    return m_BestPictureFmt;
}


}; // namespace android