blob: e524d768b8b80e2bfd23665c35770c7bee933b3e [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
// Convenience accessor for a memory object's mapped pointer at a given index.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result bookkeeping constants (frames / partial results).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum representable sample values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count ceilings.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Number of int32 values per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
// Element count of a statically sized array (classic sizeof idiom).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature superset applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel: wait without a timeout.
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
124
// Per-sensor capability and static-metadata tables, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding cross-session state (defined elsewhere).
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// CDS (chroma down-sampling) property string -> backend enum.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor-tag video HDR mode <-> backend video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor-tag IR mode <-> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700151
// ANDROID_CONTROL_EFFECT_MODE_* <-> backend effect enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* <-> backend white-balance enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* <-> backend scene enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// ANDROID_CONTROL_AF_MODE_* <-> backend focus enum.
// NOTE: AF_MODE_OFF appears twice on purpose — both OFF and FIXED backend
// modes report back as the Android OFF mode (first match wins HAL->Android).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
213
// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* <-> backend CAC enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <-> backend antibanding enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash mode used to drive the flash from AE settings.
// Note: plain AE_MODE_ON maps to FLASH_MODE_OFF (no forced flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* <-> backend flash enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* <-> backend face-detect enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* <-> backend calibration enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// ANDROID_LENS_STATE_* <-> backend lens-state enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
277
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry advertises "no thumbnail" per the Android metadata contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* <-> backend test-pattern enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
297
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested frame rate -> backend HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor-tag instant-AEC mode <-> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops dispatch table handed to the camera framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are NULL: they are
// deprecated in the HAL3 versions this HAL advertises.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Per-camera backend session ids; 0xDEADBEEF marks "no session yet".
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
357
358/*===========================================================================
359 * FUNCTION : QCamera3HardwareInterface
360 *
361 * DESCRIPTION: constructor of QCamera3HardwareInterface
362 *
363 * PARAMETERS :
364 * @cameraId : camera ID
365 *
366 * RETURN : none
367 *==========================================================================*/
// Constructor: initializes all members to safe defaults, fills in the
// camera3_device_t vtable, creates sync primitives, and snapshots the
// persist.camera.* debug/tuning properties for this session. No hardware
// is opened here — that happens in openCamera().
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Publish the HAL device struct: version depends on the build-time
    // USE_HAL_3_3 switch (3.4 by default, 3.3 for legacy builds).
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    // NOTE(review): no memset before this read — relies on property_get
    // fully overwriting prop (or writing the default); confirm.
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // SW TNR for preview defaults to enabled ("1"), unlike the other toggles.
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the GPU's pixel alignment so stream strides match what the GPU
    // expects; fall back to CAM_PAD_TO_32 if the library or symbol is absent.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
504
505/*===========================================================================
506 * FUNCTION : ~QCamera3HardwareInterface
507 *
508 * DESCRIPTION: destructor of QCamera3HardwareInterface
509 *
510 * PARAMETERS : none
511 *
512 * RETURN : none
513 *==========================================================================*/
// Destructor: tears the session down in strict order — unlink dual-cam,
// stop every channel, then delete channels, send a final "unconfigure" to
// the backend, close the camera, and free all pending bookkeeping.
// The stop-all-before-delete-any ordering is required (see comment below).
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the cross-session sessionId[] table.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: failure to unlink must not block close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any buffers/requests still tracked for in-flight captures.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
673
674/*===========================================================================
675 * FUNCTION : erasePendingRequest
676 *
677 * DESCRIPTION: function to erase a desired pending request after freeing any
678 * allocated memory
679 *
680 * PARAMETERS :
681 * @i : iterator pointing to pending request to be erased
682 *
683 * RETURN : iterator pointing to the next request
684 *==========================================================================*/
685QCamera3HardwareInterface::pendingRequestIterator
686 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
687{
688 if (i->input_buffer != NULL) {
689 free(i->input_buffer);
690 i->input_buffer = NULL;
691 }
692 if (i->settings != NULL)
693 free_camera_metadata((camera_metadata_t*)i->settings);
694 return mPendingRequestsList.erase(i);
695}
696
697/*===========================================================================
698 * FUNCTION : camEvtHandle
699 *
700 * DESCRIPTION: Function registered to mm-camera-interface to handle events
701 *
702 * PARAMETERS :
703 * @camera_handle : interface layer camera handle
704 * @evt : ptr to event
705 * @user_data : user data ptr
706 *
707 * RETURN : none
708 *==========================================================================*/
709void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
710 mm_camera_event_t *evt,
711 void *user_data)
712{
713 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
714 if (obj && evt) {
715 switch(evt->server_event_type) {
716 case CAM_EVENT_TYPE_DAEMON_DIED:
717 pthread_mutex_lock(&obj->mMutex);
718 obj->mState = ERROR;
719 pthread_mutex_unlock(&obj->mMutex);
720 LOGE("Fatal, camera daemon died");
721 break;
722
723 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
724 LOGD("HAL got request pull from Daemon");
725 pthread_mutex_lock(&obj->mMutex);
726 obj->mWokenUpByDaemon = true;
727 obj->unblockRequestIfNecessary();
728 pthread_mutex_unlock(&obj->mMutex);
729 break;
730
731 default:
732 LOGW("Warning: Unhandled event %d",
733 evt->server_event_type);
734 break;
735 }
736 } else {
737 LOGE("NULL user_data/evt");
738 }
739}
740
741/*===========================================================================
742 * FUNCTION : openCamera
743 *
744 * DESCRIPTION: open camera
745 *
746 * PARAMETERS :
747 * @hw_device : double ptr for camera device struct
748 *
749 * RETURN : int32_t type of status
750 * NO_ERROR -- success
751 * none-zero failure code
752 *==========================================================================*/
753int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
754{
755 int rc = 0;
756 if (mState != CLOSED) {
757 *hw_device = NULL;
758 return PERMISSION_DENIED;
759 }
760
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800761 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
763 mCameraId);
764
765 rc = openCamera();
766 if (rc == 0) {
767 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700769 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800770 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700771
Thierry Strudel3d639192016-09-09 11:52:26 -0700772 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
773 mCameraId, rc);
774
775 if (rc == NO_ERROR) {
776 mState = OPENED;
777 }
778 return rc;
779}
780
781/*===========================================================================
782 * FUNCTION : openCamera
783 *
784 * DESCRIPTION: open camera
785 *
786 * PARAMETERS : none
787 *
788 * RETURN : int32_t type of status
789 * NO_ERROR -- success
790 * none-zero failure code
791 *==========================================================================*/
792int QCamera3HardwareInterface::openCamera()
793{
794 int rc = 0;
795 char value[PROPERTY_VALUE_MAX];
796
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800797 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700798 if (mCameraHandle) {
799 LOGE("Failure: Camera already opened");
800 return ALREADY_EXISTS;
801 }
802
803 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
804 if (rc < 0) {
805 LOGE("Failed to reserve flash for camera id: %d",
806 mCameraId);
807 return UNKNOWN_ERROR;
808 }
809
810 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
811 if (rc) {
812 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
813 return rc;
814 }
815
816 if (!mCameraHandle) {
817 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
818 return -ENODEV;
819 }
820
821 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
822 camEvtHandle, (void *)this);
823
824 if (rc < 0) {
825 LOGE("Error, failed to register event callback");
826 /* Not closing camera here since it is already handled in destructor */
827 return FAILED_TRANSACTION;
828 }
829
830 mExifParams.debug_params =
831 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
832 if (mExifParams.debug_params) {
833 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
834 } else {
835 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
836 return NO_MEMORY;
837 }
838 mFirstConfiguration = true;
839
840 //Notify display HAL that a camera session is active.
841 //But avoid calling the same during bootup because camera service might open/close
842 //cameras at boot time during its initialization and display service will also internally
843 //wait for camera service to initialize first while calling this display API, resulting in a
844 //deadlock situation. Since boot time camera open/close calls are made only to fetch
845 //capabilities, no need of this display bw optimization.
846 //Use "service.bootanim.exit" property to know boot status.
847 property_get("service.bootanim.exit", value, "0");
848 if (atoi(value) == 1) {
849 pthread_mutex_lock(&gCamLock);
850 if (gNumCameraSessions++ == 0) {
851 setCameraLaunchStatus(true);
852 }
853 pthread_mutex_unlock(&gCamLock);
854 }
855
856 //fill the session id needed while linking dual cam
857 pthread_mutex_lock(&gCamLock);
858 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
859 &sessionId[mCameraId]);
860 pthread_mutex_unlock(&gCamLock);
861
862 if (rc < 0) {
863 LOGE("Error, failed to get sessiion id");
864 return UNKNOWN_ERROR;
865 } else {
866 //Allocate related cam sync buffer
867 //this is needed for the payload that goes along with bundling cmd for related
868 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700869 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
870 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700871 if(rc != OK) {
872 rc = NO_MEMORY;
873 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
874 return NO_MEMORY;
875 }
876
877 //Map memory for related cam sync buffer
878 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700879 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
880 m_pDualCamCmdHeap->getFd(0),
881 sizeof(cam_dual_camera_cmd_info_t),
882 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700883 if(rc < 0) {
884 LOGE("Dualcam: failed to map Related cam sync buffer");
885 rc = FAILED_TRANSACTION;
886 return NO_MEMORY;
887 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700888 m_pDualCamCmdPtr =
889 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700890 }
891
892 LOGH("mCameraId=%d",mCameraId);
893
894 return NO_ERROR;
895}
896
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera. Tears down session state in the reverse order
 *              of openCamera(): unmap/free the dual-cam command buffer,
 *              close the backend camera, disconnect HDR+, invalidate the
 *              session id, update display-HAL launch status, free EXIF
 *              debug params and release the flash unit.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced unconditionally here —
    // assumes closeCamera is only called after a successful openCamera.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    // Free the heap backing the dual-cam command buffer (must happen after
    // the unmap above).
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Last session out clears the camera-launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Hand the flash unit back to the torch HAL; failure is non-fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
966
967/*===========================================================================
968 * FUNCTION : initialize
969 *
970 * DESCRIPTION: Initialize frameworks callback functions
971 *
972 * PARAMETERS :
973 * @callback_ops : callback function to frameworks
974 *
975 * RETURN :
976 *
977 *==========================================================================*/
978int QCamera3HardwareInterface::initialize(
979 const struct camera3_callback_ops *callback_ops)
980{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800981 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700982 int rc;
983
984 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
985 pthread_mutex_lock(&mMutex);
986
987 // Validate current state
988 switch (mState) {
989 case OPENED:
990 /* valid state */
991 break;
992 default:
993 LOGE("Invalid state %d", mState);
994 rc = -ENODEV;
995 goto err1;
996 }
997
998 rc = initParameters();
999 if (rc < 0) {
1000 LOGE("initParamters failed %d", rc);
1001 goto err1;
1002 }
1003 mCallbackOps = callback_ops;
1004
1005 mChannelHandle = mCameraHandle->ops->add_channel(
1006 mCameraHandle->camera_handle, NULL, NULL, this);
1007 if (mChannelHandle == 0) {
1008 LOGE("add_channel failed");
1009 rc = -ENOMEM;
1010 pthread_mutex_unlock(&mMutex);
1011 return rc;
1012 }
1013
1014 pthread_mutex_unlock(&mMutex);
1015 mCameraInitialized = true;
1016 mState = INITIALIZED;
1017 LOGI("X");
1018 return 0;
1019
1020err1:
1021 pthread_mutex_unlock(&mMutex);
1022 return rc;
1023}
1024
1025/*===========================================================================
1026 * FUNCTION : validateStreamDimensions
1027 *
1028 * DESCRIPTION: Check if the configuration requested are those advertised
1029 *
1030 * PARAMETERS :
1031 * @stream_list : streams to be configured
1032 *
1033 * RETURN :
1034 *
1035 *==========================================================================*/
1036int QCamera3HardwareInterface::validateStreamDimensions(
1037 camera3_stream_configuration_t *streamList)
1038{
1039 int rc = NO_ERROR;
1040 size_t count = 0;
1041
1042 camera3_stream_t *inputStream = NULL;
1043 /*
1044 * Loop through all streams to find input stream if it exists*
1045 */
1046 for (size_t i = 0; i< streamList->num_streams; i++) {
1047 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1048 if (inputStream != NULL) {
1049 LOGE("Error, Multiple input streams requested");
1050 return -EINVAL;
1051 }
1052 inputStream = streamList->streams[i];
1053 }
1054 }
1055 /*
1056 * Loop through all streams requested in configuration
1057 * Check if unsupported sizes have been requested on any of them
1058 */
1059 for (size_t j = 0; j < streamList->num_streams; j++) {
1060 bool sizeFound = false;
1061 camera3_stream_t *newStream = streamList->streams[j];
1062
1063 uint32_t rotatedHeight = newStream->height;
1064 uint32_t rotatedWidth = newStream->width;
1065 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1066 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1067 rotatedHeight = newStream->width;
1068 rotatedWidth = newStream->height;
1069 }
1070
1071 /*
1072 * Sizes are different for each type of stream format check against
1073 * appropriate table.
1074 */
1075 switch (newStream->format) {
1076 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1077 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1078 case HAL_PIXEL_FORMAT_RAW10:
1079 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1080 for (size_t i = 0; i < count; i++) {
1081 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1082 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1083 sizeFound = true;
1084 break;
1085 }
1086 }
1087 break;
1088 case HAL_PIXEL_FORMAT_BLOB:
1089 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1090 /* Verify set size against generated sizes table */
1091 for (size_t i = 0; i < count; i++) {
1092 if (((int32_t)rotatedWidth ==
1093 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1094 ((int32_t)rotatedHeight ==
1095 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1096 sizeFound = true;
1097 break;
1098 }
1099 }
1100 break;
1101 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1102 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1103 default:
1104 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1105 || newStream->stream_type == CAMERA3_STREAM_INPUT
1106 || IS_USAGE_ZSL(newStream->usage)) {
1107 if (((int32_t)rotatedWidth ==
1108 gCamCapability[mCameraId]->active_array_size.width) &&
1109 ((int32_t)rotatedHeight ==
1110 gCamCapability[mCameraId]->active_array_size.height)) {
1111 sizeFound = true;
1112 break;
1113 }
1114 /* We could potentially break here to enforce ZSL stream
1115 * set from frameworks always is full active array size
1116 * but it is not clear from the spc if framework will always
1117 * follow that, also we have logic to override to full array
1118 * size, so keeping the logic lenient at the moment
1119 */
1120 }
1121 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1122 MAX_SIZES_CNT);
1123 for (size_t i = 0; i < count; i++) {
1124 if (((int32_t)rotatedWidth ==
1125 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1126 ((int32_t)rotatedHeight ==
1127 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1128 sizeFound = true;
1129 break;
1130 }
1131 }
1132 break;
1133 } /* End of switch(newStream->format) */
1134
1135 /* We error out even if a single stream has unsupported size set */
1136 if (!sizeFound) {
1137 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1138 rotatedWidth, rotatedHeight, newStream->format,
1139 gCamCapability[mCameraId]->active_array_size.width,
1140 gCamCapability[mCameraId]->active_array_size.height);
1141 rc = -EINVAL;
1142 break;
1143 }
1144 } /* End of for each stream */
1145 return rc;
1146}
1147
1148/*==============================================================================
1149 * FUNCTION : isSupportChannelNeeded
1150 *
1151 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1152 *
1153 * PARAMETERS :
1154 * @stream_list : streams to be configured
1155 * @stream_config_info : the config info for streams to be configured
1156 *
1157 * RETURN : Boolen true/false decision
1158 *
1159 *==========================================================================*/
1160bool QCamera3HardwareInterface::isSupportChannelNeeded(
1161 camera3_stream_configuration_t *streamList,
1162 cam_stream_size_info_t stream_config_info)
1163{
1164 uint32_t i;
1165 bool pprocRequested = false;
1166 /* Check for conditions where PProc pipeline does not have any streams*/
1167 for (i = 0; i < stream_config_info.num_streams; i++) {
1168 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1169 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1170 pprocRequested = true;
1171 break;
1172 }
1173 }
1174
1175 if (pprocRequested == false )
1176 return true;
1177
1178 /* Dummy stream needed if only raw or jpeg streams present */
1179 for (i = 0; i < streamList->num_streams; i++) {
1180 switch(streamList->streams[i]->format) {
1181 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1182 case HAL_PIXEL_FORMAT_RAW10:
1183 case HAL_PIXEL_FORMAT_RAW16:
1184 case HAL_PIXEL_FORMAT_BLOB:
1185 break;
1186 default:
1187 return false;
1188 }
1189 }
1190 return true;
1191}
1192
/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on the current stream
 *              configuration. Pushes the maximum stream dimension to the
 *              backend first (which selects the sensor mode), then queries
 *              the resulting mode info back.
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // The largest width and largest height over all configured streams
    // (independently) determine the required sensor output dimension.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Batch protocol: clear the shared parameter buffer, stage the entry,
    // then issue set_parms. Order matters; mParameters is reused below.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Now query the sensor mode the backend chose for that dimension.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the queried entry into the caller-provided output struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
        sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
        sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
        sensorModeInfo.op_pixel_clk);

    return rc;
}
1251
1252/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001253 * FUNCTION : addToPPFeatureMask
1254 *
1255 * DESCRIPTION: add additional features to pp feature mask based on
1256 * stream type and usecase
1257 *
1258 * PARAMETERS :
1259 * @stream_format : stream type for feature mask
1260 * @stream_idx : stream idx within postprocess_mask list to change
1261 *
1262 * RETURN : NULL
1263 *
1264 *==========================================================================*/
1265void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1266 uint32_t stream_idx)
1267{
1268 char feature_mask_value[PROPERTY_VALUE_MAX];
1269 cam_feature_mask_t feature_mask;
1270 int args_converted;
1271 int property_len;
1272
1273 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001274#ifdef _LE_CAMERA_
1275 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1276 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1277 property_len = property_get("persist.camera.hal3.feature",
1278 feature_mask_value, swtnr_feature_mask_value);
1279#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001280 property_len = property_get("persist.camera.hal3.feature",
1281 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001282#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001283 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1284 (feature_mask_value[1] == 'x')) {
1285 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1286 } else {
1287 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1288 }
1289 if (1 != args_converted) {
1290 feature_mask = 0;
1291 LOGE("Wrong feature mask %s", feature_mask_value);
1292 return;
1293 }
1294
1295 switch (stream_format) {
1296 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1297 /* Add LLVD to pp feature mask only if video hint is enabled */
1298 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1299 mStreamConfigInfo.postprocess_mask[stream_idx]
1300 |= CAM_QTI_FEATURE_SW_TNR;
1301 LOGH("Added SW TNR to pp feature mask");
1302 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1303 mStreamConfigInfo.postprocess_mask[stream_idx]
1304 |= CAM_QCOM_FEATURE_LLVD;
1305 LOGH("Added LLVD SeeMore to pp feature mask");
1306 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001307 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1308 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1309 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001311 break;
1312 }
1313 default:
1314 break;
1315 }
1316 LOGD("PP feature mask %llx",
1317 mStreamConfigInfo.postprocess_mask[stream_idx]);
1318}
1319
1320/*==============================================================================
1321 * FUNCTION : updateFpsInPreviewBuffer
1322 *
1323 * DESCRIPTION: update FPS information in preview buffer.
1324 *
1325 * PARAMETERS :
1326 * @metadata : pointer to metadata buffer
1327 * @frame_number: frame_number to look for in pending buffer list
1328 *
1329 * RETURN : None
1330 *
1331 *==========================================================================*/
1332void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1333 uint32_t frame_number)
1334{
1335 // Mark all pending buffers for this particular request
1336 // with corresponding framerate information
1337 for (List<PendingBuffersInRequest>::iterator req =
1338 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1339 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1340 for(List<PendingBufferInfo>::iterator j =
1341 req->mPendingBufferList.begin();
1342 j != req->mPendingBufferList.end(); j++) {
1343 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1344 if ((req->frame_number == frame_number) &&
1345 (channel->getStreamTypeMask() &
1346 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1347 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1348 CAM_INTF_PARM_FPS_RANGE, metadata) {
1349 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1350 struct private_handle_t *priv_handle =
1351 (struct private_handle_t *)(*(j->buffer));
1352 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1353 }
1354 }
1355 }
1356 }
1357}
1358
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001359/*==============================================================================
1360 * FUNCTION : updateTimeStampInPendingBuffers
1361 *
1362 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1363 * of a frame number
1364 *
1365 * PARAMETERS :
1366 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1367 * @timestamp : timestamp to be set
1368 *
1369 * RETURN : None
1370 *
1371 *==========================================================================*/
1372void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1373 uint32_t frameNumber, nsecs_t timestamp)
1374{
1375 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1376 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1377 if (req->frame_number != frameNumber)
1378 continue;
1379
1380 for (auto k = req->mPendingBufferList.begin();
1381 k != req->mPendingBufferList.end(); k++ ) {
1382 struct private_handle_t *priv_handle =
1383 (struct private_handle_t *) (*(k->buffer));
1384 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1385 }
1386 }
1387 return;
1388}
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*===========================================================================
1391 * FUNCTION : configureStreams
1392 *
1393 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1394 * and output streams.
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 *
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::configureStreams(
1403 camera3_stream_configuration_t *streamList)
1404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001405 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 int rc = 0;
1407
1408 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001409 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001410 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001411 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001412
1413 return rc;
1414}
1415
1416/*===========================================================================
1417 * FUNCTION : configureStreamsPerfLocked
1418 *
1419 * DESCRIPTION: configureStreams while perfLock is held.
1420 *
1421 * PARAMETERS :
1422 * @stream_list : streams to be configured
1423 *
1424 * RETURN : int32_t type of status
1425 * NO_ERROR -- success
1426 * none-zero failure code
1427 *==========================================================================*/
1428int QCamera3HardwareInterface::configureStreamsPerfLocked(
1429 camera3_stream_configuration_t *streamList)
1430{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001431 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001432 int rc = 0;
1433
1434 // Sanity check stream_list
1435 if (streamList == NULL) {
1436 LOGE("NULL stream configuration");
1437 return BAD_VALUE;
1438 }
1439 if (streamList->streams == NULL) {
1440 LOGE("NULL stream list");
1441 return BAD_VALUE;
1442 }
1443
1444 if (streamList->num_streams < 1) {
1445 LOGE("Bad number of streams requested: %d",
1446 streamList->num_streams);
1447 return BAD_VALUE;
1448 }
1449
1450 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1451 LOGE("Maximum number of streams %d exceeded: %d",
1452 MAX_NUM_STREAMS, streamList->num_streams);
1453 return BAD_VALUE;
1454 }
1455
1456 mOpMode = streamList->operation_mode;
1457 LOGD("mOpMode: %d", mOpMode);
1458
1459 /* first invalidate all the steams in the mStreamList
1460 * if they appear again, they will be validated */
1461 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1462 it != mStreamInfo.end(); it++) {
1463 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1464 if (channel) {
1465 channel->stop();
1466 }
1467 (*it)->status = INVALID;
1468 }
1469
1470 if (mRawDumpChannel) {
1471 mRawDumpChannel->stop();
1472 delete mRawDumpChannel;
1473 mRawDumpChannel = NULL;
1474 }
1475
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 if (mHdrPlusRawSrcChannel) {
1477 mHdrPlusRawSrcChannel->stop();
1478 delete mHdrPlusRawSrcChannel;
1479 mHdrPlusRawSrcChannel = NULL;
1480 }
1481
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 if (mSupportChannel)
1483 mSupportChannel->stop();
1484
1485 if (mAnalysisChannel) {
1486 mAnalysisChannel->stop();
1487 }
1488 if (mMetadataChannel) {
1489 /* If content of mStreamInfo is not 0, there is metadata stream */
1490 mMetadataChannel->stop();
1491 }
1492 if (mChannelHandle) {
1493 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1494 mChannelHandle);
1495 LOGD("stopping channel %d", mChannelHandle);
1496 }
1497
1498 pthread_mutex_lock(&mMutex);
1499
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500 // Check if HDR+ is enabled.
1501 char prop[PROPERTY_VALUE_MAX];
1502 property_get("persist.camera.hdrplus", prop, "0");
1503 bool enableHdrPlus = atoi(prop);
1504 if (enableHdrPlus) {
1505 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1506 // Connect to HDR+ client if not yet.
1507 if (mHdrPlusClient == nullptr) {
1508 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1509 rc = mHdrPlusClient->connect(this);
1510 if (rc < 0) {
1511 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1512 strerror(-rc), rc);
1513 pthread_mutex_unlock(&mMutex);
1514 return -ENODEV;
1515 }
1516
1517 // Set static metadata.
1518 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1519 if (rc < 0) {
1520 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1521 strerror(-rc), rc);
1522 pthread_mutex_unlock(&mMutex);
1523 return -ENODEV;
1524 }
1525 }
1526 } else {
1527 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1528 // Disconnect from HDR+ client if HDR+ is not enabled.
1529 if (mHdrPlusClient != nullptr) {
1530 mHdrPlusClient->disconnect();
1531 mHdrPlusClient = nullptr;
1532 }
1533 }
1534
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 // Check state
1536 switch (mState) {
1537 case INITIALIZED:
1538 case CONFIGURED:
1539 case STARTED:
1540 /* valid state */
1541 break;
1542 default:
1543 LOGE("Invalid state %d", mState);
1544 pthread_mutex_unlock(&mMutex);
1545 return -ENODEV;
1546 }
1547
1548 /* Check whether we have video stream */
1549 m_bIs4KVideo = false;
1550 m_bIsVideo = false;
1551 m_bEisSupportedSize = false;
1552 m_bTnrEnabled = false;
1553 bool isZsl = false;
1554 uint32_t videoWidth = 0U;
1555 uint32_t videoHeight = 0U;
1556 size_t rawStreamCnt = 0;
1557 size_t stallStreamCnt = 0;
1558 size_t processedStreamCnt = 0;
1559 // Number of streams on ISP encoder path
1560 size_t numStreamsOnEncoder = 0;
1561 size_t numYuv888OnEncoder = 0;
1562 bool bYuv888OverrideJpeg = false;
1563 cam_dimension_t largeYuv888Size = {0, 0};
1564 cam_dimension_t maxViewfinderSize = {0, 0};
1565 bool bJpegExceeds4K = false;
1566 bool bJpegOnEncoder = false;
1567 bool bUseCommonFeatureMask = false;
1568 cam_feature_mask_t commonFeatureMask = 0;
1569 bool bSmallJpegSize = false;
1570 uint32_t width_ratio;
1571 uint32_t height_ratio;
1572 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1573 camera3_stream_t *inputStream = NULL;
1574 bool isJpeg = false;
1575 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001576 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001577
1578 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1579
1580 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001581 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001582 uint8_t eis_prop_set;
1583 uint32_t maxEisWidth = 0;
1584 uint32_t maxEisHeight = 0;
1585
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001586 // Initialize all instant AEC related variables
1587 mInstantAEC = false;
1588 mResetInstantAEC = false;
1589 mInstantAECSettledFrameNumber = 0;
1590 mAecSkipDisplayFrameBound = 0;
1591 mInstantAecFrameIdxCount = 0;
1592
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1594
1595 size_t count = IS_TYPE_MAX;
1596 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1597 for (size_t i = 0; i < count; i++) {
1598 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001599 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1600 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001601 break;
1602 }
1603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001604 count = CAM_OPT_STAB_MAX;
1605 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1606 for (size_t i = 0; i < count; i++) {
1607 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1608 oisSupported = true;
1609 break;
1610 }
1611 }
1612
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001613 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001614 maxEisWidth = MAX_EIS_WIDTH;
1615 maxEisHeight = MAX_EIS_HEIGHT;
1616 }
1617
1618 /* EIS setprop control */
1619 char eis_prop[PROPERTY_VALUE_MAX];
1620 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001621 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 eis_prop_set = (uint8_t)atoi(eis_prop);
1623
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001624 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001625 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1626
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001627 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1628 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1629
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 /* stream configurations */
1631 for (size_t i = 0; i < streamList->num_streams; i++) {
1632 camera3_stream_t *newStream = streamList->streams[i];
1633 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1634 "height = %d, rotation = %d, usage = 0x%x",
1635 i, newStream->stream_type, newStream->format,
1636 newStream->width, newStream->height, newStream->rotation,
1637 newStream->usage);
1638 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1639 newStream->stream_type == CAMERA3_STREAM_INPUT){
1640 isZsl = true;
1641 }
1642 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1643 inputStream = newStream;
1644 }
1645
1646 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1647 isJpeg = true;
1648 jpegSize.width = newStream->width;
1649 jpegSize.height = newStream->height;
1650 if (newStream->width > VIDEO_4K_WIDTH ||
1651 newStream->height > VIDEO_4K_HEIGHT)
1652 bJpegExceeds4K = true;
1653 }
1654
1655 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1656 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1657 m_bIsVideo = true;
1658 videoWidth = newStream->width;
1659 videoHeight = newStream->height;
1660 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1661 (VIDEO_4K_HEIGHT <= newStream->height)) {
1662 m_bIs4KVideo = true;
1663 }
1664 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1665 (newStream->height <= maxEisHeight);
1666 }
1667 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1668 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1669 switch (newStream->format) {
1670 case HAL_PIXEL_FORMAT_BLOB:
1671 stallStreamCnt++;
1672 if (isOnEncoder(maxViewfinderSize, newStream->width,
1673 newStream->height)) {
1674 numStreamsOnEncoder++;
1675 bJpegOnEncoder = true;
1676 }
1677 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1678 newStream->width);
1679 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1680 newStream->height);;
1681 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1682 "FATAL: max_downscale_factor cannot be zero and so assert");
1683 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1684 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1685 LOGH("Setting small jpeg size flag to true");
1686 bSmallJpegSize = true;
1687 }
1688 break;
1689 case HAL_PIXEL_FORMAT_RAW10:
1690 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1691 case HAL_PIXEL_FORMAT_RAW16:
1692 rawStreamCnt++;
1693 break;
1694 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1695 processedStreamCnt++;
1696 if (isOnEncoder(maxViewfinderSize, newStream->width,
1697 newStream->height)) {
1698 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1699 !IS_USAGE_ZSL(newStream->usage)) {
1700 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1701 }
1702 numStreamsOnEncoder++;
1703 }
1704 break;
1705 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1706 processedStreamCnt++;
1707 if (isOnEncoder(maxViewfinderSize, newStream->width,
1708 newStream->height)) {
1709 // If Yuv888 size is not greater than 4K, set feature mask
1710 // to SUPERSET so that it support concurrent request on
1711 // YUV and JPEG.
1712 if (newStream->width <= VIDEO_4K_WIDTH &&
1713 newStream->height <= VIDEO_4K_HEIGHT) {
1714 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1715 }
1716 numStreamsOnEncoder++;
1717 numYuv888OnEncoder++;
1718 largeYuv888Size.width = newStream->width;
1719 largeYuv888Size.height = newStream->height;
1720 }
1721 break;
1722 default:
1723 processedStreamCnt++;
1724 if (isOnEncoder(maxViewfinderSize, newStream->width,
1725 newStream->height)) {
1726 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1727 numStreamsOnEncoder++;
1728 }
1729 break;
1730 }
1731
1732 }
1733 }
1734
1735 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1736 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1737 !m_bIsVideo) {
1738 m_bEisEnable = false;
1739 }
1740
1741 /* Logic to enable/disable TNR based on specific config size/etc.*/
1742 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1743 ((videoWidth == 1920 && videoHeight == 1080) ||
1744 (videoWidth == 1280 && videoHeight == 720)) &&
1745 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1746 m_bTnrEnabled = true;
1747
1748 /* Check if num_streams is sane */
1749 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1750 rawStreamCnt > MAX_RAW_STREAMS ||
1751 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1752 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1753 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1754 pthread_mutex_unlock(&mMutex);
1755 return -EINVAL;
1756 }
1757 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001758 if (isZsl && m_bIs4KVideo) {
1759 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001760 pthread_mutex_unlock(&mMutex);
1761 return -EINVAL;
1762 }
1763 /* Check if stream sizes are sane */
1764 if (numStreamsOnEncoder > 2) {
1765 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1766 pthread_mutex_unlock(&mMutex);
1767 return -EINVAL;
1768 } else if (1 < numStreamsOnEncoder){
1769 bUseCommonFeatureMask = true;
1770 LOGH("Multiple streams above max viewfinder size, common mask needed");
1771 }
1772
1773 /* Check if BLOB size is greater than 4k in 4k recording case */
1774 if (m_bIs4KVideo && bJpegExceeds4K) {
1775 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1776 pthread_mutex_unlock(&mMutex);
1777 return -EINVAL;
1778 }
1779
1780 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1781 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1782 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1783 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1784 // configurations:
1785 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1786 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1787 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1788 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1789 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1790 __func__);
1791 pthread_mutex_unlock(&mMutex);
1792 return -EINVAL;
1793 }
1794
1795 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1796 // the YUV stream's size is greater or equal to the JPEG size, set common
1797 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1798 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1799 jpegSize.width, jpegSize.height) &&
1800 largeYuv888Size.width > jpegSize.width &&
1801 largeYuv888Size.height > jpegSize.height) {
1802 bYuv888OverrideJpeg = true;
1803 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1804 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1805 }
1806
1807 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1808 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1809 commonFeatureMask);
1810 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1811 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1812
1813 rc = validateStreamDimensions(streamList);
1814 if (rc == NO_ERROR) {
1815 rc = validateStreamRotations(streamList);
1816 }
1817 if (rc != NO_ERROR) {
1818 LOGE("Invalid stream configuration requested!");
1819 pthread_mutex_unlock(&mMutex);
1820 return rc;
1821 }
1822
1823 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1824 for (size_t i = 0; i < streamList->num_streams; i++) {
1825 camera3_stream_t *newStream = streamList->streams[i];
1826 LOGH("newStream type = %d, stream format = %d "
1827 "stream size : %d x %d, stream rotation = %d",
1828 newStream->stream_type, newStream->format,
1829 newStream->width, newStream->height, newStream->rotation);
1830 //if the stream is in the mStreamList validate it
1831 bool stream_exists = false;
1832 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1833 it != mStreamInfo.end(); it++) {
1834 if ((*it)->stream == newStream) {
1835 QCamera3ProcessingChannel *channel =
1836 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1837 stream_exists = true;
1838 if (channel)
1839 delete channel;
1840 (*it)->status = VALID;
1841 (*it)->stream->priv = NULL;
1842 (*it)->channel = NULL;
1843 }
1844 }
1845 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1846 //new stream
1847 stream_info_t* stream_info;
1848 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1849 if (!stream_info) {
1850 LOGE("Could not allocate stream info");
1851 rc = -ENOMEM;
1852 pthread_mutex_unlock(&mMutex);
1853 return rc;
1854 }
1855 stream_info->stream = newStream;
1856 stream_info->status = VALID;
1857 stream_info->channel = NULL;
1858 mStreamInfo.push_back(stream_info);
1859 }
1860 /* Covers Opaque ZSL and API1 F/W ZSL */
1861 if (IS_USAGE_ZSL(newStream->usage)
1862 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1863 if (zslStream != NULL) {
1864 LOGE("Multiple input/reprocess streams requested!");
1865 pthread_mutex_unlock(&mMutex);
1866 return BAD_VALUE;
1867 }
1868 zslStream = newStream;
1869 }
1870 /* Covers YUV reprocess */
1871 if (inputStream != NULL) {
1872 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1873 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1874 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1875 && inputStream->width == newStream->width
1876 && inputStream->height == newStream->height) {
1877 if (zslStream != NULL) {
1878 /* This scenario indicates multiple YUV streams with same size
1879 * as input stream have been requested, since zsl stream handle
1880 * is solely use for the purpose of overriding the size of streams
1881 * which share h/w streams we will just make a guess here as to
1882 * which of the stream is a ZSL stream, this will be refactored
1883 * once we make generic logic for streams sharing encoder output
1884 */
1885 LOGH("Warning, Multiple ip/reprocess streams requested!");
1886 }
1887 zslStream = newStream;
1888 }
1889 }
1890 }
1891
1892 /* If a zsl stream is set, we know that we have configured at least one input or
1893 bidirectional stream */
1894 if (NULL != zslStream) {
1895 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1896 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1897 mInputStreamInfo.format = zslStream->format;
1898 mInputStreamInfo.usage = zslStream->usage;
1899 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1900 mInputStreamInfo.dim.width,
1901 mInputStreamInfo.dim.height,
1902 mInputStreamInfo.format, mInputStreamInfo.usage);
1903 }
1904
1905 cleanAndSortStreamInfo();
1906 if (mMetadataChannel) {
1907 delete mMetadataChannel;
1908 mMetadataChannel = NULL;
1909 }
1910 if (mSupportChannel) {
1911 delete mSupportChannel;
1912 mSupportChannel = NULL;
1913 }
1914
1915 if (mAnalysisChannel) {
1916 delete mAnalysisChannel;
1917 mAnalysisChannel = NULL;
1918 }
1919
1920 if (mDummyBatchChannel) {
1921 delete mDummyBatchChannel;
1922 mDummyBatchChannel = NULL;
1923 }
1924
1925 //Create metadata channel and initialize it
1926 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1927 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1928 gCamCapability[mCameraId]->color_arrangement);
1929 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1930 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001931 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (mMetadataChannel == NULL) {
1933 LOGE("failed to allocate metadata channel");
1934 rc = -ENOMEM;
1935 pthread_mutex_unlock(&mMutex);
1936 return rc;
1937 }
1938 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1939 if (rc < 0) {
1940 LOGE("metadata channel initialization failed");
1941 delete mMetadataChannel;
1942 mMetadataChannel = NULL;
1943 pthread_mutex_unlock(&mMutex);
1944 return rc;
1945 }
1946
Thierry Strudel3d639192016-09-09 11:52:26 -07001947 bool isRawStreamRequested = false;
1948 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1949 /* Allocate channel objects for the requested streams */
1950 for (size_t i = 0; i < streamList->num_streams; i++) {
1951 camera3_stream_t *newStream = streamList->streams[i];
1952 uint32_t stream_usage = newStream->usage;
1953 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1954 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1955 struct camera_info *p_info = NULL;
1956 pthread_mutex_lock(&gCamLock);
1957 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1958 pthread_mutex_unlock(&gCamLock);
1959 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1960 || IS_USAGE_ZSL(newStream->usage)) &&
1961 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1963 if (bUseCommonFeatureMask) {
1964 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1965 commonFeatureMask;
1966 } else {
1967 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1968 CAM_QCOM_FEATURE_NONE;
1969 }
1970
1971 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1972 LOGH("Input stream configured, reprocess config");
1973 } else {
1974 //for non zsl streams find out the format
1975 switch (newStream->format) {
1976 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1977 {
1978 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1979 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1980 /* add additional features to pp feature mask */
1981 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1982 mStreamConfigInfo.num_streams);
1983
1984 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1985 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1986 CAM_STREAM_TYPE_VIDEO;
1987 if (m_bTnrEnabled && m_bTnrVideo) {
1988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1989 CAM_QCOM_FEATURE_CPP_TNR;
1990 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1991 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1992 ~CAM_QCOM_FEATURE_CDS;
1993 }
1994 } else {
1995 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1996 CAM_STREAM_TYPE_PREVIEW;
1997 if (m_bTnrEnabled && m_bTnrPreview) {
1998 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1999 CAM_QCOM_FEATURE_CPP_TNR;
2000 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2001 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2002 ~CAM_QCOM_FEATURE_CDS;
2003 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002004 if(!m_bSwTnrPreview) {
2005 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2006 ~CAM_QTI_FEATURE_SW_TNR;
2007 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 padding_info.width_padding = mSurfaceStridePadding;
2009 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002010 previewSize.width = (int32_t)newStream->width;
2011 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 }
2013 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2014 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2015 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2016 newStream->height;
2017 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2018 newStream->width;
2019 }
2020 }
2021 break;
2022 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2023 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2024 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2025 if (bUseCommonFeatureMask)
2026 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2027 commonFeatureMask;
2028 else
2029 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2030 CAM_QCOM_FEATURE_NONE;
2031 } else {
2032 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2033 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 }
2035 break;
2036 case HAL_PIXEL_FORMAT_BLOB:
2037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2038 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2039 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2040 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2041 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2042 } else {
2043 if (bUseCommonFeatureMask &&
2044 isOnEncoder(maxViewfinderSize, newStream->width,
2045 newStream->height)) {
2046 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2047 } else {
2048 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2049 }
2050 }
2051 if (isZsl) {
2052 if (zslStream) {
2053 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2054 (int32_t)zslStream->width;
2055 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2056 (int32_t)zslStream->height;
2057 } else {
2058 LOGE("Error, No ZSL stream identified");
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062 } else if (m_bIs4KVideo) {
2063 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2064 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2065 } else if (bYuv888OverrideJpeg) {
2066 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2067 (int32_t)largeYuv888Size.width;
2068 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2069 (int32_t)largeYuv888Size.height;
2070 }
2071 break;
2072 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2073 case HAL_PIXEL_FORMAT_RAW16:
2074 case HAL_PIXEL_FORMAT_RAW10:
2075 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2076 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2077 isRawStreamRequested = true;
2078 break;
2079 default:
2080 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2081 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2082 break;
2083 }
2084 }
2085
2086 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2087 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2088 gCamCapability[mCameraId]->color_arrangement);
2089
2090 if (newStream->priv == NULL) {
2091 //New stream, construct channel
2092 switch (newStream->stream_type) {
2093 case CAMERA3_STREAM_INPUT:
2094 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2095 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2096 break;
2097 case CAMERA3_STREAM_BIDIRECTIONAL:
2098 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2099 GRALLOC_USAGE_HW_CAMERA_WRITE;
2100 break;
2101 case CAMERA3_STREAM_OUTPUT:
2102 /* For video encoding stream, set read/write rarely
2103 * flag so that they may be set to un-cached */
2104 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2105 newStream->usage |=
2106 (GRALLOC_USAGE_SW_READ_RARELY |
2107 GRALLOC_USAGE_SW_WRITE_RARELY |
2108 GRALLOC_USAGE_HW_CAMERA_WRITE);
2109 else if (IS_USAGE_ZSL(newStream->usage))
2110 {
2111 LOGD("ZSL usage flag skipping");
2112 }
2113 else if (newStream == zslStream
2114 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2115 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2116 } else
2117 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2118 break;
2119 default:
2120 LOGE("Invalid stream_type %d", newStream->stream_type);
2121 break;
2122 }
2123
2124 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2125 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2126 QCamera3ProcessingChannel *channel = NULL;
2127 switch (newStream->format) {
2128 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2129 if ((newStream->usage &
2130 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2131 (streamList->operation_mode ==
2132 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2133 ) {
2134 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2135 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002136 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002137 this,
2138 newStream,
2139 (cam_stream_type_t)
2140 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2141 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2142 mMetadataChannel,
2143 0); //heap buffers are not required for HFR video channel
2144 if (channel == NULL) {
2145 LOGE("allocation of channel failed");
2146 pthread_mutex_unlock(&mMutex);
2147 return -ENOMEM;
2148 }
2149 //channel->getNumBuffers() will return 0 here so use
2150 //MAX_INFLIGH_HFR_REQUESTS
2151 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2152 newStream->priv = channel;
2153 LOGI("num video buffers in HFR mode: %d",
2154 MAX_INFLIGHT_HFR_REQUESTS);
2155 } else {
2156 /* Copy stream contents in HFR preview only case to create
2157 * dummy batch channel so that sensor streaming is in
2158 * HFR mode */
2159 if (!m_bIsVideo && (streamList->operation_mode ==
2160 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2161 mDummyBatchStream = *newStream;
2162 }
2163 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2164 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002165 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 this,
2167 newStream,
2168 (cam_stream_type_t)
2169 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2171 mMetadataChannel,
2172 MAX_INFLIGHT_REQUESTS);
2173 if (channel == NULL) {
2174 LOGE("allocation of channel failed");
2175 pthread_mutex_unlock(&mMutex);
2176 return -ENOMEM;
2177 }
2178 newStream->max_buffers = channel->getNumBuffers();
2179 newStream->priv = channel;
2180 }
2181 break;
2182 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2183 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2184 mChannelHandle,
2185 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002186 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002187 this,
2188 newStream,
2189 (cam_stream_type_t)
2190 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2191 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2192 mMetadataChannel);
2193 if (channel == NULL) {
2194 LOGE("allocation of YUV channel failed");
2195 pthread_mutex_unlock(&mMutex);
2196 return -ENOMEM;
2197 }
2198 newStream->max_buffers = channel->getNumBuffers();
2199 newStream->priv = channel;
2200 break;
2201 }
2202 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2203 case HAL_PIXEL_FORMAT_RAW16:
2204 case HAL_PIXEL_FORMAT_RAW10:
2205 mRawChannel = new QCamera3RawChannel(
2206 mCameraHandle->camera_handle, mChannelHandle,
2207 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002208 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002209 this, newStream,
2210 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2211 mMetadataChannel,
2212 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2213 if (mRawChannel == NULL) {
2214 LOGE("allocation of raw channel failed");
2215 pthread_mutex_unlock(&mMutex);
2216 return -ENOMEM;
2217 }
2218 newStream->max_buffers = mRawChannel->getNumBuffers();
2219 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2220 break;
2221 case HAL_PIXEL_FORMAT_BLOB:
2222 // Max live snapshot inflight buffer is 1. This is to mitigate
2223 // frame drop issues for video snapshot. The more buffers being
2224 // allocated, the more frame drops there are.
2225 mPictureChannel = new QCamera3PicChannel(
2226 mCameraHandle->camera_handle, mChannelHandle,
2227 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002228 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2230 m_bIs4KVideo, isZsl, mMetadataChannel,
2231 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2232 if (mPictureChannel == NULL) {
2233 LOGE("allocation of channel failed");
2234 pthread_mutex_unlock(&mMutex);
2235 return -ENOMEM;
2236 }
2237 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2238 newStream->max_buffers = mPictureChannel->getNumBuffers();
2239 mPictureChannel->overrideYuvSize(
2240 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2241 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2242 break;
2243
2244 default:
2245 LOGE("not a supported format 0x%x", newStream->format);
2246 break;
2247 }
2248 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2249 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2250 } else {
2251 LOGE("Error, Unknown stream type");
2252 pthread_mutex_unlock(&mMutex);
2253 return -EINVAL;
2254 }
2255
2256 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2257 if (channel != NULL && channel->isUBWCEnabled()) {
2258 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2260 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2262 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2263 }
2264 }
2265
2266 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2267 it != mStreamInfo.end(); it++) {
2268 if ((*it)->stream == newStream) {
2269 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2270 break;
2271 }
2272 }
2273 } else {
2274 // Channel already exists for this stream
2275 // Do nothing for now
2276 }
2277 padding_info = gCamCapability[mCameraId]->padding_info;
2278
2279 /* Do not add entries for input stream in metastream info
2280 * since there is no real stream associated with it
2281 */
2282 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2283 mStreamConfigInfo.num_streams++;
2284 }
2285
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002286 // Create analysis stream all the time, even when h/w support is not available
2287 {
2288 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2289 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2290 gCamCapability[mCameraId]->color_arrangement);
2291 cam_analysis_info_t analysisInfo;
2292 int32_t ret = NO_ERROR;
2293 ret = mCommon.getAnalysisInfo(
2294 FALSE,
2295 analysisFeatureMask,
2296 &analysisInfo);
2297 if (ret == NO_ERROR) {
2298 cam_dimension_t analysisDim;
2299 analysisDim = mCommon.getMatchingDimension(previewSize,
2300 analysisInfo.analysis_recommended_res);
2301
2302 mAnalysisChannel = new QCamera3SupportChannel(
2303 mCameraHandle->camera_handle,
2304 mChannelHandle,
2305 mCameraHandle->ops,
2306 &analysisInfo.analysis_padding_info,
2307 analysisFeatureMask,
2308 CAM_STREAM_TYPE_ANALYSIS,
2309 &analysisDim,
2310 (analysisInfo.analysis_format
2311 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2312 : CAM_FORMAT_YUV_420_NV21),
2313 analysisInfo.hw_analysis_supported,
2314 gCamCapability[mCameraId]->color_arrangement,
2315 this,
2316 0); // force buffer count to 0
2317 } else {
2318 LOGW("getAnalysisInfo failed, ret = %d", ret);
2319 }
2320 if (!mAnalysisChannel) {
2321 LOGW("Analysis channel cannot be created");
2322 }
2323 }
2324
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 //RAW DUMP channel
2326 if (mEnableRawDump && isRawStreamRequested == false){
2327 cam_dimension_t rawDumpSize;
2328 rawDumpSize = getMaxRawSize(mCameraId);
2329 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2330 setPAAFSupport(rawDumpFeatureMask,
2331 CAM_STREAM_TYPE_RAW,
2332 gCamCapability[mCameraId]->color_arrangement);
2333 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2334 mChannelHandle,
2335 mCameraHandle->ops,
2336 rawDumpSize,
2337 &padding_info,
2338 this, rawDumpFeatureMask);
2339 if (!mRawDumpChannel) {
2340 LOGE("Raw Dump channel cannot be created");
2341 pthread_mutex_unlock(&mMutex);
2342 return -ENOMEM;
2343 }
2344 }
2345
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002346 // Initialize HDR+ Raw Source channel.
2347 if (mHdrPlusClient != nullptr) {
2348 if (isRawStreamRequested || mRawDumpChannel) {
2349 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2350 __FUNCTION__);
2351 mHdrPlusClient->disconnect();
2352 mHdrPlusClient = nullptr;
2353 } else {
2354 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2355 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2356 setPAAFSupport(hdrPlusRawFeatureMask,
2357 CAM_STREAM_TYPE_RAW,
2358 gCamCapability[mCameraId]->color_arrangement);
2359 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2360 mChannelHandle,
2361 mCameraHandle->ops,
2362 rawSize,
2363 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002364 this, hdrPlusRawFeatureMask,
2365 mHdrPlusClient,
2366 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002367 if (!mHdrPlusRawSrcChannel) {
2368 LOGE("HDR+ Raw Source channel cannot be created");
2369 pthread_mutex_unlock(&mMutex);
2370 return -ENOMEM;
2371 }
2372 }
2373 }
2374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375
2376 if (mAnalysisChannel) {
2377 cam_analysis_info_t analysisInfo;
2378 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2380 CAM_STREAM_TYPE_ANALYSIS;
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2382 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2383 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2384 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2385 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002386 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2388 &analysisInfo);
2389 if (rc != NO_ERROR) {
2390 LOGE("getAnalysisInfo failed, ret = %d", rc);
2391 pthread_mutex_unlock(&mMutex);
2392 return rc;
2393 }
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002395 mCommon.getMatchingDimension(previewSize,
2396 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 mStreamConfigInfo.num_streams++;
2398 }
2399
2400 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2401 cam_analysis_info_t supportInfo;
2402 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2403 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2404 setPAAFSupport(callbackFeatureMask,
2405 CAM_STREAM_TYPE_CALLBACK,
2406 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002407 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002408 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002409 if (ret != NO_ERROR) {
2410 /* Ignore the error for Mono camera
2411 * because the PAAF bit mask is only set
2412 * for CAM_STREAM_TYPE_ANALYSIS stream type
2413 */
2414 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2415 LOGW("getAnalysisInfo failed, ret = %d", ret);
2416 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002417 }
2418 mSupportChannel = new QCamera3SupportChannel(
2419 mCameraHandle->camera_handle,
2420 mChannelHandle,
2421 mCameraHandle->ops,
2422 &gCamCapability[mCameraId]->padding_info,
2423 callbackFeatureMask,
2424 CAM_STREAM_TYPE_CALLBACK,
2425 &QCamera3SupportChannel::kDim,
2426 CAM_FORMAT_YUV_420_NV21,
2427 supportInfo.hw_analysis_supported,
2428 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002429 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 if (!mSupportChannel) {
2431 LOGE("dummy channel cannot be created");
2432 pthread_mutex_unlock(&mMutex);
2433 return -ENOMEM;
2434 }
2435 }
2436
2437 if (mSupportChannel) {
2438 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2439 QCamera3SupportChannel::kDim;
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_CALLBACK;
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2443 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2444 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2445 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2446 gCamCapability[mCameraId]->color_arrangement);
2447 mStreamConfigInfo.num_streams++;
2448 }
2449
2450 if (mRawDumpChannel) {
2451 cam_dimension_t rawSize;
2452 rawSize = getMaxRawSize(mCameraId);
2453 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2454 rawSize;
2455 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2456 CAM_STREAM_TYPE_RAW;
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2458 CAM_QCOM_FEATURE_NONE;
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462 mStreamConfigInfo.num_streams++;
2463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002464
2465 if (mHdrPlusRawSrcChannel) {
2466 cam_dimension_t rawSize;
2467 rawSize = getMaxRawSize(mCameraId);
2468 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2469 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2471 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2473 gCamCapability[mCameraId]->color_arrangement);
2474 mStreamConfigInfo.num_streams++;
2475 }
2476
Thierry Strudel3d639192016-09-09 11:52:26 -07002477 /* In HFR mode, if video stream is not added, create a dummy channel so that
2478 * ISP can create a batch mode even for preview only case. This channel is
2479 * never 'start'ed (no stream-on), it is only 'initialized' */
2480 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2481 !m_bIsVideo) {
2482 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2483 setPAAFSupport(dummyFeatureMask,
2484 CAM_STREAM_TYPE_VIDEO,
2485 gCamCapability[mCameraId]->color_arrangement);
2486 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle,
2488 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002489 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 this,
2491 &mDummyBatchStream,
2492 CAM_STREAM_TYPE_VIDEO,
2493 dummyFeatureMask,
2494 mMetadataChannel);
2495 if (NULL == mDummyBatchChannel) {
2496 LOGE("creation of mDummyBatchChannel failed."
2497 "Preview will use non-hfr sensor mode ");
2498 }
2499 }
2500 if (mDummyBatchChannel) {
2501 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2502 mDummyBatchStream.width;
2503 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2504 mDummyBatchStream.height;
2505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2506 CAM_STREAM_TYPE_VIDEO;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2508 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2509 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2511 gCamCapability[mCameraId]->color_arrangement);
2512 mStreamConfigInfo.num_streams++;
2513 }
2514
2515 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2516 mStreamConfigInfo.buffer_info.max_buffers =
2517 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2518
2519 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2520 for (pendingRequestIterator i = mPendingRequestsList.begin();
2521 i != mPendingRequestsList.end();) {
2522 i = erasePendingRequest(i);
2523 }
2524 mPendingFrameDropList.clear();
2525 // Initialize/Reset the pending buffers list
2526 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2527 req.mPendingBufferList.clear();
2528 }
2529 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2530
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 mCurJpegMeta.clear();
2532 //Get min frame duration for this streams configuration
2533 deriveMinFrameDuration();
2534
2535 // Update state
2536 mState = CONFIGURED;
2537
2538 pthread_mutex_unlock(&mMutex);
2539
2540 return rc;
2541}
2542
2543/*===========================================================================
2544 * FUNCTION : validateCaptureRequest
2545 *
2546 * DESCRIPTION: validate a capture request from camera service
2547 *
2548 * PARAMETERS :
2549 * @request : request from framework to process
2550 *
2551 * RETURN :
2552 *
2553 *==========================================================================*/
2554int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002555 camera3_capture_request_t *request,
2556 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002557{
2558 ssize_t idx = 0;
2559 const camera3_stream_buffer_t *b;
2560 CameraMetadata meta;
2561
2562 /* Sanity check the request */
2563 if (request == NULL) {
2564 LOGE("NULL capture request");
2565 return BAD_VALUE;
2566 }
2567
2568 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2569 /*settings cannot be null for the first request*/
2570 return BAD_VALUE;
2571 }
2572
2573 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002574 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2575 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002576 LOGE("Request %d: No output buffers provided!",
2577 __FUNCTION__, frameNumber);
2578 return BAD_VALUE;
2579 }
2580 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2581 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2582 request->num_output_buffers, MAX_NUM_STREAMS);
2583 return BAD_VALUE;
2584 }
2585 if (request->input_buffer != NULL) {
2586 b = request->input_buffer;
2587 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2588 LOGE("Request %d: Buffer %ld: Status not OK!",
2589 frameNumber, (long)idx);
2590 return BAD_VALUE;
2591 }
2592 if (b->release_fence != -1) {
2593 LOGE("Request %d: Buffer %ld: Has a release fence!",
2594 frameNumber, (long)idx);
2595 return BAD_VALUE;
2596 }
2597 if (b->buffer == NULL) {
2598 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2599 frameNumber, (long)idx);
2600 return BAD_VALUE;
2601 }
2602 }
2603
2604 // Validate all buffers
2605 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002606 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 QCamera3ProcessingChannel *channel =
2608 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2609 if (channel == NULL) {
2610 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2611 frameNumber, (long)idx);
2612 return BAD_VALUE;
2613 }
2614 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2615 LOGE("Request %d: Buffer %ld: Status not OK!",
2616 frameNumber, (long)idx);
2617 return BAD_VALUE;
2618 }
2619 if (b->release_fence != -1) {
2620 LOGE("Request %d: Buffer %ld: Has a release fence!",
2621 frameNumber, (long)idx);
2622 return BAD_VALUE;
2623 }
2624 if (b->buffer == NULL) {
2625 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2626 frameNumber, (long)idx);
2627 return BAD_VALUE;
2628 }
2629 if (*(b->buffer) == NULL) {
2630 LOGE("Request %d: Buffer %ld: NULL private handle!",
2631 frameNumber, (long)idx);
2632 return BAD_VALUE;
2633 }
2634 idx++;
2635 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002637 return NO_ERROR;
2638}
2639
2640/*===========================================================================
2641 * FUNCTION : deriveMinFrameDuration
2642 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2644 * on currently configured streams.
2645 *
2646 * PARAMETERS : NONE
2647 *
2648 * RETURN : NONE
2649 *
2650 *==========================================================================*/
2651void QCamera3HardwareInterface::deriveMinFrameDuration()
2652{
2653 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2654
2655 maxJpegDim = 0;
2656 maxProcessedDim = 0;
2657 maxRawDim = 0;
2658
2659 // Figure out maximum jpeg, processed, and raw dimensions
2660 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2661 it != mStreamInfo.end(); it++) {
2662
2663 // Input stream doesn't have valid stream_type
2664 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2665 continue;
2666
2667 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2668 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2669 if (dimension > maxJpegDim)
2670 maxJpegDim = dimension;
2671 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2672 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2673 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2674 if (dimension > maxRawDim)
2675 maxRawDim = dimension;
2676 } else {
2677 if (dimension > maxProcessedDim)
2678 maxProcessedDim = dimension;
2679 }
2680 }
2681
2682 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2683 MAX_SIZES_CNT);
2684
2685 //Assume all jpeg dimensions are in processed dimensions.
2686 if (maxJpegDim > maxProcessedDim)
2687 maxProcessedDim = maxJpegDim;
2688 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2689 if (maxProcessedDim > maxRawDim) {
2690 maxRawDim = INT32_MAX;
2691
2692 for (size_t i = 0; i < count; i++) {
2693 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2694 gCamCapability[mCameraId]->raw_dim[i].height;
2695 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2696 maxRawDim = dimension;
2697 }
2698 }
2699
2700 //Find minimum durations for processed, jpeg, and raw
2701 for (size_t i = 0; i < count; i++) {
2702 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2703 gCamCapability[mCameraId]->raw_dim[i].height) {
2704 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2705 break;
2706 }
2707 }
2708 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2709 for (size_t i = 0; i < count; i++) {
2710 if (maxProcessedDim ==
2711 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2712 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2713 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2714 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2715 break;
2716 }
2717 }
2718}
2719
2720/*===========================================================================
2721 * FUNCTION : getMinFrameDuration
2722 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN : min frame duration for a particular request
2729 *
2730 *==========================================================================*/
2731int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2732{
2733 bool hasJpegStream = false;
2734 bool hasRawStream = false;
2735 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2736 const camera3_stream_t *stream = request->output_buffers[i].stream;
2737 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2738 hasJpegStream = true;
2739 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2740 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2741 stream->format == HAL_PIXEL_FORMAT_RAW16)
2742 hasRawStream = true;
2743 }
2744
2745 if (!hasJpegStream)
2746 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2747 else
2748 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2749}
2750
2751/*===========================================================================
2752 * FUNCTION : handleBuffersDuringFlushLock
2753 *
2754 * DESCRIPTION: Account for buffers returned from back-end during flush
2755 * This function is executed while mMutex is held by the caller.
2756 *
2757 * PARAMETERS :
2758 * @buffer: image buffer for the callback
2759 *
2760 * RETURN :
2761 *==========================================================================*/
2762void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2763{
2764 bool buffer_found = false;
2765 for (List<PendingBuffersInRequest>::iterator req =
2766 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2767 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2768 for (List<PendingBufferInfo>::iterator i =
2769 req->mPendingBufferList.begin();
2770 i != req->mPendingBufferList.end(); i++) {
2771 if (i->buffer == buffer->buffer) {
2772 mPendingBuffersMap.numPendingBufsAtFlush--;
2773 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2774 buffer->buffer, req->frame_number,
2775 mPendingBuffersMap.numPendingBufsAtFlush);
2776 buffer_found = true;
2777 break;
2778 }
2779 }
2780 if (buffer_found) {
2781 break;
2782 }
2783 }
2784 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2785 //signal the flush()
2786 LOGD("All buffers returned to HAL. Continue flush");
2787 pthread_cond_signal(&mBuffersCond);
2788 }
2789}
2790
Thierry Strudel3d639192016-09-09 11:52:26 -07002791/*===========================================================================
2792 * FUNCTION : handleBatchMetadata
2793 *
2794 * DESCRIPTION: Handles metadata buffer callback in batch mode
2795 *
2796 * PARAMETERS : @metadata_buf: metadata buffer
2797 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2798 * the meta buf in this method
2799 *
2800 * RETURN :
2801 *
2802 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the validity flags, frame numbers and sensor timestamp out of
    // the batch metadata buffer; any NULL pointer marks the whole batch as
    // invalid (but it is still pushed through handleMetadataWithLock below).
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of that batch via mPendingBatchMap. A missing entry means
        // the backend reported a frame we never queued — unrecoverable.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; the map entry is
        // consumed (removeItem) once the batch's last frame has arrived.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One handleMetadataWithLock call per frame in the batch; the two
        // diffs may differ, so iterate over the larger of the two.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the metadata in place with the interpolated
                    // urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread the batch evenly at the HFR video
                //frame rate, ending at the timestamp of the last frame.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // handleMetadataWithLock must not free the buffer: it is shared by
        // every iteration and buf-done'd once, below.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2967
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002968void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2969 camera3_error_msg_code_t errorCode)
2970{
2971 camera3_notify_msg_t notify_msg;
2972 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2973 notify_msg.type = CAMERA3_MSG_ERROR;
2974 notify_msg.message.error.error_code = errorCode;
2975 notify_msg.message.error.error_stream = NULL;
2976 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002977 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002978
2979 return;
2980}
Thierry Strudel3d639192016-09-09 11:52:26 -07002981/*===========================================================================
2982 * FUNCTION : handleMetadataWithLock
2983 *
2984 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2985 *
2986 * PARAMETERS : @metadata_buf: metadata buffer
2987 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2988 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002989 * @firstMetadataInBatch: Boolean to indicate whether this is the
2990 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002991 *
2992 * RETURN :
2993 *
2994 *==========================================================================*/
2995void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002996 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2997 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002998{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002999 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003000 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3001 //during flush do not send metadata from this thread
3002 LOGD("not sending metadata during flush or when mState is error");
3003 if (free_and_bufdone_meta_buf) {
3004 mMetadataChannel->bufDone(metadata_buf);
3005 free(metadata_buf);
3006 }
3007 return;
3008 }
3009
3010 //not in flush
3011 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3012 int32_t frame_number_valid, urgent_frame_number_valid;
3013 uint32_t frame_number, urgent_frame_number;
3014 int64_t capture_time;
3015 nsecs_t currentSysTime;
3016
3017 int32_t *p_frame_number_valid =
3018 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3019 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3020 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3021 int32_t *p_urgent_frame_number_valid =
3022 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3023 uint32_t *p_urgent_frame_number =
3024 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3025 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3026 metadata) {
3027 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3028 *p_frame_number_valid, *p_frame_number);
3029 }
3030
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003031 camera_metadata_t *resultMetadata = nullptr;
3032
Thierry Strudel3d639192016-09-09 11:52:26 -07003033 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3034 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3035 LOGE("Invalid metadata");
3036 if (free_and_bufdone_meta_buf) {
3037 mMetadataChannel->bufDone(metadata_buf);
3038 free(metadata_buf);
3039 }
3040 goto done_metadata;
3041 }
3042 frame_number_valid = *p_frame_number_valid;
3043 frame_number = *p_frame_number;
3044 capture_time = *p_capture_time;
3045 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3046 urgent_frame_number = *p_urgent_frame_number;
3047 currentSysTime = systemTime(CLOCK_MONOTONIC);
3048
3049 // Detect if buffers from any requests are overdue
3050 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003051 int64_t timeout;
3052 {
3053 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3054 // If there is a pending HDR+ request, the following requests may be blocked until the
3055 // HDR+ request is done. So allow a longer timeout.
3056 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3057 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3058 }
3059
3060 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003061 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003062 assert(missed.stream->priv);
3063 if (missed.stream->priv) {
3064 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3065 assert(ch->mStreams[0]);
3066 if (ch->mStreams[0]) {
3067 LOGE("Cancel missing frame = %d, buffer = %p,"
3068 "stream type = %d, stream format = %d",
3069 req.frame_number, missed.buffer,
3070 ch->mStreams[0]->getMyType(), missed.stream->format);
3071 ch->timeoutFrame(req.frame_number);
3072 }
3073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 }
3075 }
3076 }
3077 //Partial result on process_capture_result for timestamp
3078 if (urgent_frame_number_valid) {
3079 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3080 urgent_frame_number, capture_time);
3081
3082 //Recieved an urgent Frame Number, handle it
3083 //using partial results
3084 for (pendingRequestIterator i =
3085 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3086 LOGD("Iterator Frame = %d urgent frame = %d",
3087 i->frame_number, urgent_frame_number);
3088
3089 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3090 (i->partial_result_cnt == 0)) {
3091 LOGE("Error: HAL missed urgent metadata for frame number %d",
3092 i->frame_number);
3093 }
3094
3095 if (i->frame_number == urgent_frame_number &&
3096 i->bUrgentReceived == 0) {
3097
3098 camera3_capture_result_t result;
3099 memset(&result, 0, sizeof(camera3_capture_result_t));
3100
3101 i->partial_result_cnt++;
3102 i->bUrgentReceived = 1;
3103 // Extract 3A metadata
3104 result.result =
3105 translateCbUrgentMetadataToResultMetadata(metadata);
3106 // Populate metadata result
3107 result.frame_number = urgent_frame_number;
3108 result.num_output_buffers = 0;
3109 result.output_buffers = NULL;
3110 result.partial_result = i->partial_result_cnt;
3111
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003112 if (mHdrPlusClient != nullptr) {
3113 // Notify HDR+ client about the partial metadata.
3114 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3115 result.partial_result == PARTIAL_RESULT_COUNT);
3116 }
3117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003119 LOGD("urgent frame_number = %u, capture_time = %lld",
3120 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003121 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3122 // Instant AEC settled for this frame.
3123 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3124 mInstantAECSettledFrameNumber = urgent_frame_number;
3125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003126 free_camera_metadata((camera_metadata_t *)result.result);
3127 break;
3128 }
3129 }
3130 }
3131
3132 if (!frame_number_valid) {
3133 LOGD("Not a valid normal frame number, used as SOF only");
3134 if (free_and_bufdone_meta_buf) {
3135 mMetadataChannel->bufDone(metadata_buf);
3136 free(metadata_buf);
3137 }
3138 goto done_metadata;
3139 }
3140 LOGH("valid frame_number = %u, capture_time = %lld",
3141 frame_number, capture_time);
3142
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003143 // Check whether any stream buffer corresponding to this is dropped or not
3144 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3145 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3146 for (auto & pendingRequest : mPendingRequestsList) {
3147 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3148 mInstantAECSettledFrameNumber)) {
3149 camera3_notify_msg_t notify_msg = {};
3150 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003151 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003152 QCamera3ProcessingChannel *channel =
3153 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003154 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003155 if (p_cam_frame_drop) {
3156 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003157 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003158 // Got the stream ID for drop frame.
3159 dropFrame = true;
3160 break;
3161 }
3162 }
3163 } else {
3164 // This is instant AEC case.
3165 // For instant AEC drop the stream untill AEC is settled.
3166 dropFrame = true;
3167 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003168
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003169 if (dropFrame) {
3170 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3171 if (p_cam_frame_drop) {
3172 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003173 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003174 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003175 } else {
3176 // For instant AEC, inform frame drop and frame number
3177 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3178 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003179 pendingRequest.frame_number, streamID,
3180 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003181 }
3182 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003183 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003184 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003185 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003186 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003187 if (p_cam_frame_drop) {
3188 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003189 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003190 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003191 } else {
3192 // For instant AEC, inform frame drop and frame number
3193 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3194 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003195 pendingRequest.frame_number, streamID,
3196 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003197 }
3198 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003199 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003200 PendingFrameDrop.stream_ID = streamID;
3201 // Add the Frame drop info to mPendingFrameDropList
3202 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003204 }
3205 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003206 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003207
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003208 for (auto & pendingRequest : mPendingRequestsList) {
3209 // Find the pending request with the frame number.
3210 if (pendingRequest.frame_number == frame_number) {
3211 // Update the sensor timestamp.
3212 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003213
Thierry Strudel3d639192016-09-09 11:52:26 -07003214
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003215 /* Set the timestamp in display metadata so that clients aware of
3216 private_handle such as VT can use this un-modified timestamps.
3217 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003218 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003219
Thierry Strudel3d639192016-09-09 11:52:26 -07003220 // Find channel requiring metadata, meaning internal offline postprocess
3221 // is needed.
3222 //TODO: for now, we don't support two streams requiring metadata at the same time.
3223 // (because we are not making copies, and metadata buffer is not reference counted.
3224 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003225 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3226 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003227 if (iter->need_metadata) {
3228 internalPproc = true;
3229 QCamera3ProcessingChannel *channel =
3230 (QCamera3ProcessingChannel *)iter->stream->priv;
3231 channel->queueReprocMetadata(metadata_buf);
3232 break;
3233 }
3234 }
3235
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003236 for (auto itr = pendingRequest.internalRequestList.begin();
3237 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003238 if (itr->need_metadata) {
3239 internalPproc = true;
3240 QCamera3ProcessingChannel *channel =
3241 (QCamera3ProcessingChannel *)itr->stream->priv;
3242 channel->queueReprocMetadata(metadata_buf);
3243 break;
3244 }
3245 }
3246
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003247 resultMetadata = translateFromHalMetadata(metadata,
3248 pendingRequest.timestamp, pendingRequest.request_id,
3249 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3250 pendingRequest.capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08003251 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003252 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003253 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003254 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003255 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003256
3257 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003258 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003259
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003260 if (pendingRequest.blob_request) {
3261 //Dump tuning metadata if enabled and available
3262 char prop[PROPERTY_VALUE_MAX];
3263 memset(prop, 0, sizeof(prop));
3264 property_get("persist.camera.dumpmetadata", prop, "0");
3265 int32_t enabled = atoi(prop);
3266 if (enabled && metadata->is_tuning_params_valid) {
3267 dumpMetadataToFile(metadata->tuning_params,
3268 mMetaFrameCount,
3269 enabled,
3270 "Snapshot",
3271 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 }
3273 }
3274
3275 if (!internalPproc) {
3276 LOGD("couldn't find need_metadata for this metadata");
3277 // Return metadata buffer
3278 if (free_and_bufdone_meta_buf) {
3279 mMetadataChannel->bufDone(metadata_buf);
3280 free(metadata_buf);
3281 }
3282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003283
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003284 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003285 }
3286 }
3287
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003288 // Try to send out shutter callbacks and capture results.
3289 handlePendingResultsWithLock(frame_number, resultMetadata);
3290 return;
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292done_metadata:
3293 for (pendingRequestIterator i = mPendingRequestsList.begin();
3294 i != mPendingRequestsList.end() ;i++) {
3295 i->pipeline_depth++;
3296 }
3297 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3298 unblockRequestIfNecessary();
3299}
3300
3301/*===========================================================================
3302 * FUNCTION : hdrPlusPerfLock
3303 *
3304 * DESCRIPTION: perf lock for HDR+ using custom intent
3305 *
3306 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3307 *
3308 * RETURN : None
3309 *
3310 *==========================================================================*/
3311void QCamera3HardwareInterface::hdrPlusPerfLock(
3312 mm_camera_super_buf_t *metadata_buf)
3313{
3314 if (NULL == metadata_buf) {
3315 LOGE("metadata_buf is NULL");
3316 return;
3317 }
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t *p_frame_number_valid =
3321 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3322 uint32_t *p_frame_number =
3323 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3324
3325 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3326 LOGE("%s: Invalid metadata", __func__);
3327 return;
3328 }
3329
3330 //acquire perf lock for 5 sec after the last HDR frame is captured
3331 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3332 if ((p_frame_number != NULL) &&
3333 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003334 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003335 }
3336 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003337}
3338
3339/*===========================================================================
3340 * FUNCTION : handleInputBufferWithLock
3341 *
3342 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3343 *
3344 * PARAMETERS : @frame_number: frame number of the input buffer
3345 *
3346 * RETURN :
3347 *
3348 *==========================================================================*/
3349void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 pendingRequestIterator i = mPendingRequestsList.begin();
3353 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3354 i++;
3355 }
3356 if (i != mPendingRequestsList.end() && i->input_buffer) {
3357 //found the right request
3358 if (!i->shutter_notified) {
3359 CameraMetadata settings;
3360 camera3_notify_msg_t notify_msg;
3361 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3362 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3363 if(i->settings) {
3364 settings = i->settings;
3365 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3366 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3367 } else {
3368 LOGE("No timestamp in input settings! Using current one.");
3369 }
3370 } else {
3371 LOGE("Input settings missing!");
3372 }
3373
3374 notify_msg.type = CAMERA3_MSG_SHUTTER;
3375 notify_msg.message.shutter.frame_number = frame_number;
3376 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003377 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 i->shutter_notified = true;
3379 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3380 i->frame_number, notify_msg.message.shutter.timestamp);
3381 }
3382
3383 if (i->input_buffer->release_fence != -1) {
3384 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3385 close(i->input_buffer->release_fence);
3386 if (rc != OK) {
3387 LOGE("input buffer sync wait failed %d", rc);
3388 }
3389 }
3390
3391 camera3_capture_result result;
3392 memset(&result, 0, sizeof(camera3_capture_result));
3393 result.frame_number = frame_number;
3394 result.result = i->settings;
3395 result.input_buffer = i->input_buffer;
3396 result.partial_result = PARTIAL_RESULT_COUNT;
3397
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003398 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 LOGD("Input request metadata and input buffer frame_number = %u",
3400 i->frame_number);
3401 i = erasePendingRequest(i);
3402 } else {
3403 LOGE("Could not find input request for frame number %d", frame_number);
3404 }
3405}
3406
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
 *              If the frame number is no longer in the pending request list
 *              the buffer is returned to the framework immediately;
 *              otherwise it is cached on the pending request until the
 *              matching result metadata arrives.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // First JPEG (BLOB) buffer means the snapshot finished: drop the
    // snapshot perf lock taken for capture.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        // (a smaller live frame number here would mean results got out of
        // order; log it but still deliver the buffer).
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata (result.result == NULL),
        // partial_result 0.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped by the ISP,
        // mark the buffer as errored and consume the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: honor the input buffer's release fence
            // before the buffer can be consumed.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    LOGE("input buffer sync wait failed %d", rc);
                }
            }
        }

        // Put buffer into the pending request
        // (a heap copy is cached; it is freed when the result is sent in
        // handlePendingResultsWithLock).
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer: release startup perf locks and switch to the
    // steady-state preview power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3522
// Try to deliver, in frame-number order, the shutter callbacks and capture
// results for every pending request that is ready. Called with mMutex held.
// @frameNumber     frame whose result metadata just became available
// @resultMetadata  metadata for that frame (may be nullptr for reprocess,
//                  whose result is the request settings)
void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
        const camera_metadata_t *resultMetadata)
{
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    bool liveRequest = false;
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestIter->input_buffer != nullptr) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        liveRequest = true;
        requestIter->partial_result_cnt++;
        mPendingLiveRequest--;

        // For a live request, send the metadata to HDR+ client.
        // NOTE(review): this dereferences resultMetadata unconditionally;
        // callers appear to pass nullptr only for reprocess requests (which
        // take the branch above) — confirm no live caller passes nullptr.
        if (mHdrPlusClient != nullptr) {
            mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
        }
    }

    // The pending requests are ordered by increasing frame numbers. The shutter callback and
    // result metadata are ready to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out shutter callbacks and results that are
    // ready. Also if this result metadata belongs to a live request, notify errors for previous
    // live requests that don't have result metadata yet.
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        if (readyToSend && iter->resultMetadata == nullptr) {
            readyToSend = false;
        }

        bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;

        std::vector<camera3_stream_buffer_t> outputBuffers;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send out shutter callback
        // and result metadata.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }

            // Invoke shutter callback if not yet.
            if (!iter->shutter_notified) {
                int64_t timestamp = systemTime(CLOCK_MONOTONIC);

                // Find the timestamp in HDR+ result metadata
                camera_metadata_ro_entry_t entry;
                status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
                        ANDROID_SENSOR_TIMESTAMP, &entry);
                if (res != OK) {
                    ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                        __FUNCTION__, iter->frame_number, strerror(-res), res);
                } else {
                    timestamp = entry.data.i64[0];
                }

                camera3_notify_msg_t notify_msg = {};
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = iter->frame_number;
                notify_msg.message.shutter.timestamp = timestamp;
                orchestrateNotify(&notify_msg);
                iter->shutter_notified = true;
            }

            result.input_buffer = iter->input_buffer;

            // Prepare output buffer array
            for (auto bufferInfoIter = iter->buffers.begin();
                    bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
                if (bufferInfoIter->buffer != nullptr) {

                    QCamera3Channel *channel =
                        (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
                    uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());

                    // Check if this buffer is a dropped frame.
                    // NOTE(review): the drop-list match compares against the
                    // incoming frameNumber, not iter->frame_number, even
                    // though this loop can be flushing earlier frames —
                    // confirm this is intentional.
                    auto frameDropIter = mPendingFrameDropList.begin();
                    while (frameDropIter != mPendingFrameDropList.end()) {
                        if((frameDropIter->stream_ID == streamID) &&
                            (frameDropIter->frame_number == frameNumber)) {
                            bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
                            LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
                                  streamID);
                            mPendingFrameDropList.erase(frameDropIter);
                            break;
                        } else {
                            frameDropIter++;
                        }
                    }

                    // Check buffer error status
                    bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
                        bufferInfoIter->buffer->buffer);
                    mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);

                    // The cached heap copy (allocated in handleBufferWithLock)
                    // is copied into the result array and freed here.
                    outputBuffers.push_back(*(bufferInfoIter->buffer));
                    free(bufferInfoIter->buffer);
                    bufferInfoIter->buffer = NULL;
                }
            }

            result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
            result.num_output_buffers = outputBuffers.size();
        } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
            result.result = dummyMetadata.release();

            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            iter++;
            continue;
        }

        orchestrateResult(&result);

        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        iter = erasePendingRequest(iter);
    }

    if (liveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    unblockRequestIfNecessary();
}
3696
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS :
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Signal unconditionally: the waiter re-checks its predicate under
    // mMutex, so a wakeup when nothing is blocked is harmless.
    pthread_cond_signal(&mRequestCond);
}
3713
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003714/*===========================================================================
3715 * FUNCTION : isHdrSnapshotRequest
3716 *
3717 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3718 *
3719 * PARAMETERS : camera3 request structure
3720 *
3721 * RETURN : boolean decision variable
3722 *
3723 *==========================================================================*/
3724bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3725{
3726 if (request == NULL) {
3727 LOGE("Invalid request handle");
3728 assert(0);
3729 return false;
3730 }
3731
3732 if (!mForceHdrSnapshot) {
3733 CameraMetadata frame_settings;
3734 frame_settings = request->settings;
3735
3736 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3737 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3738 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3739 return false;
3740 }
3741 } else {
3742 return false;
3743 }
3744
3745 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3746 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3747 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3748 return false;
3749 }
3750 } else {
3751 return false;
3752 }
3753 }
3754
3755 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3756 if (request->output_buffers[i].stream->format
3757 == HAL_PIXEL_FORMAT_BLOB) {
3758 return true;
3759 }
3760 }
3761
3762 return false;
3763}
/*===========================================================================
 * FUNCTION   : orchestrateRequest
 *
 * DESCRIPTION: Orchestrates a capture request from camera service.
 *              For an HDR snapshot, the single framework request is expanded
 *              into a bracketed sequence of internal requests (-2x EV,
 *              0 EV, +2x EV) tracked through _orchestrationDb; otherwise the
 *              request is passed through with a translated frame number.
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Save the framework's view of the request so it can be restored after
    // the HAL injects internal sub-requests below.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // NOTE(review): the return codes of these intermediate
        // processCaptureRequest() calls are ignored — confirm failures in
        // the middle of the bracket are acceptable to drop silently.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Re-issue the framework's own output buffers under an internal
        // frame number mapped back to the original framework frame number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // NOTE(review): 'modified_meta = modified_settings' clones the
        // buffer, and the later release() overwrites modified_settings
        // without freeing the previous allocation — this looks like a
        // camera_metadata_t leak (here and in the 2X step below); confirm
        // whether ownership is transferred elsewhere.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // Metering-only settling request first...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the actual 0 EV capture, which needs metadata for
        // offline postprocessing.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Same settling-then-capture pattern at +2x EV.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal request: translate the framework frame number to an
        // internal one and pass the request straight through.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3910
3911/*===========================================================================
3912 * FUNCTION : orchestrateResult
3913 *
3914 * DESCRIPTION: Orchestrates a capture result to camera service
3915 *
3916 * PARAMETERS :
3917 * @request : request from framework to process
3918 *
3919 * RETURN :
3920 *
3921 *==========================================================================*/
3922void QCamera3HardwareInterface::orchestrateResult(
3923 camera3_capture_result_t *result)
3924{
3925 uint32_t frameworkFrameNumber;
3926 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3927 frameworkFrameNumber);
3928 if (rc != NO_ERROR) {
3929 LOGE("Cannot find translated frameworkFrameNumber");
3930 assert(0);
3931 } else {
3932 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3933 LOGD("CAM_DEBUG Internal Request drop the result");
3934 } else {
3935 result->frame_number = frameworkFrameNumber;
3936 mCallbackOps->process_capture_result(mCallbackOps, result);
3937 }
3938 }
3939}
3940
3941/*===========================================================================
3942 * FUNCTION : orchestrateNotify
3943 *
3944 * DESCRIPTION: Orchestrates a notify to camera service
3945 *
3946 * PARAMETERS :
3947 * @request : request from framework to process
3948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3953{
3954 uint32_t frameworkFrameNumber;
3955 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3956 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3957 frameworkFrameNumber);
3958 if (rc != NO_ERROR) {
3959 LOGE("Cannot find translated frameworkFrameNumber");
3960 assert(0);
3961 } else {
3962 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3963 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3964 } else {
3965 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3966 mCallbackOps->notify(mCallbackOps, notify_msg);
3967 }
3968 }
3969}
3970
3971/*===========================================================================
3972 * FUNCTION : FrameNumberRegistry
3973 *
3974 * DESCRIPTION: Constructor
3975 *
3976 * PARAMETERS :
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981FrameNumberRegistry::FrameNumberRegistry()
3982{
3983 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3984}
3985
/*===========================================================================
 * FUNCTION   : ~FrameNumberRegistry
 *
 * DESCRIPTION: Destructor. No explicit cleanup needed; the member map
 *              releases its entries automatically.
 *
 * PARAMETERS :
 *
 * RETURN     :
 *
 *==========================================================================*/
FrameNumberRegistry::~FrameNumberRegistry()
{
}
3999
4000/*===========================================================================
4001 * FUNCTION : PurgeOldEntriesLocked
4002 *
4003 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4004 *
4005 * PARAMETERS :
4006 *
4007 * RETURN : NONE
4008 *
4009 *==========================================================================*/
4010void FrameNumberRegistry::purgeOldEntriesLocked()
4011{
4012 while (_register.begin() != _register.end()) {
4013 auto itr = _register.begin();
4014 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4015 _register.erase(itr);
4016 } else {
4017 return;
4018 }
4019 }
4020}
4021
4022/*===========================================================================
4023 * FUNCTION : allocStoreInternalFrameNumber
4024 *
4025 * DESCRIPTION: Method to note down a framework request and associate a new
4026 * internal request number against it
4027 *
4028 * PARAMETERS :
4029 * @fFrameNumber: Identifier given by framework
4030 * @internalFN : Output parameter which will have the newly generated internal
4031 * entry
4032 *
4033 * RETURN : Error code
4034 *
4035 *==========================================================================*/
4036int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4037 uint32_t &internalFrameNumber)
4038{
4039 Mutex::Autolock lock(mRegistryLock);
4040 internalFrameNumber = _nextFreeInternalNumber++;
4041 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4042 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4043 purgeOldEntriesLocked();
4044 return NO_ERROR;
4045}
4046
4047/*===========================================================================
4048 * FUNCTION : generateStoreInternalFrameNumber
4049 *
4050 * DESCRIPTION: Method to associate a new internal request number independent
4051 * of any associate with framework requests
4052 *
4053 * PARAMETERS :
4054 * @internalFrame#: Output parameter which will have the newly generated internal
4055 *
4056 *
4057 * RETURN : Error code
4058 *
4059 *==========================================================================*/
4060int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4061{
4062 Mutex::Autolock lock(mRegistryLock);
4063 internalFrameNumber = _nextFreeInternalNumber++;
4064 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4065 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4066 purgeOldEntriesLocked();
4067 return NO_ERROR;
4068}
4069
4070/*===========================================================================
4071 * FUNCTION : getFrameworkFrameNumber
4072 *
4073 * DESCRIPTION: Method to query the framework framenumber given an internal #
4074 *
4075 * PARAMETERS :
4076 * @internalFrame#: Internal reference
4077 * @frameworkframenumber: Output parameter holding framework frame entry
4078 *
4079 * RETURN : Error code
4080 *
4081 *==========================================================================*/
4082int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4083 uint32_t &frameworkFrameNumber)
4084{
4085 Mutex::Autolock lock(mRegistryLock);
4086 auto itr = _register.find(internalFrameNumber);
4087 if (itr == _register.end()) {
4088 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
4089 return -ENOENT;
4090 }
4091
4092 frameworkFrameNumber = itr->second;
4093 purgeOldEntriesLocked();
4094 return NO_ERROR;
4095}
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004097status_t QCamera3HardwareInterface::fillPbStreamConfig(
4098 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4099 QCamera3Channel *channel, uint32_t streamIndex) {
4100 if (config == nullptr) {
4101 LOGE("%s: config is null", __FUNCTION__);
4102 return BAD_VALUE;
4103 }
4104
4105 if (channel == nullptr) {
4106 LOGE("%s: channel is null", __FUNCTION__);
4107 return BAD_VALUE;
4108 }
4109
4110 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4111 if (stream == nullptr) {
4112 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4113 return NAME_NOT_FOUND;
4114 }
4115
4116 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4117 if (streamInfo == nullptr) {
4118 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4119 return NAME_NOT_FOUND;
4120 }
4121
4122 config->id = pbStreamId;
4123 config->image.width = streamInfo->dim.width;
4124 config->image.height = streamInfo->dim.height;
4125 config->image.padding = 0;
4126 config->image.format = pbStreamFormat;
4127
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128 uint32_t totalPlaneSize = 0;
4129
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004130 // Fill plane information.
4131 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4132 pbcamera::PlaneConfiguration plane;
4133 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4134 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4135 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004136
4137 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004138 }
4139
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004140 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004141 return OK;
4142}
4143
Thierry Strudel3d639192016-09-09 11:52:26 -07004144/*===========================================================================
4145 * FUNCTION : processCaptureRequest
4146 *
4147 * DESCRIPTION: process a capture request from camera service
4148 *
4149 * PARAMETERS :
4150 * @request : request from framework to process
4151 *
4152 * RETURN :
4153 *
4154 *==========================================================================*/
4155int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004156 camera3_capture_request_t *request,
4157 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004158{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004159 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004160 int rc = NO_ERROR;
4161 int32_t request_id;
4162 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004163 bool isVidBufRequested = false;
4164 camera3_stream_buffer_t *pInputBuffer = NULL;
4165
4166 pthread_mutex_lock(&mMutex);
4167
4168 // Validate current state
4169 switch (mState) {
4170 case CONFIGURED:
4171 case STARTED:
4172 /* valid state */
4173 break;
4174
4175 case ERROR:
4176 pthread_mutex_unlock(&mMutex);
4177 handleCameraDeviceError();
4178 return -ENODEV;
4179
4180 default:
4181 LOGE("Invalid state %d", mState);
4182 pthread_mutex_unlock(&mMutex);
4183 return -ENODEV;
4184 }
4185
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004186 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004187 if (rc != NO_ERROR) {
4188 LOGE("incoming request is not valid");
4189 pthread_mutex_unlock(&mMutex);
4190 return rc;
4191 }
4192
4193 meta = request->settings;
4194
4195 // For first capture request, send capture intent, and
4196 // stream on all streams
4197 if (mState == CONFIGURED) {
4198 // send an unconfigure to the backend so that the isp
4199 // resources are deallocated
4200 if (!mFirstConfiguration) {
4201 cam_stream_size_info_t stream_config_info;
4202 int32_t hal_version = CAM_HAL_V3;
4203 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4204 stream_config_info.buffer_info.min_buffers =
4205 MIN_INFLIGHT_REQUESTS;
4206 stream_config_info.buffer_info.max_buffers =
4207 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4208 clear_metadata_buffer(mParameters);
4209 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4210 CAM_INTF_PARM_HAL_VERSION, hal_version);
4211 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4212 CAM_INTF_META_STREAM_INFO, stream_config_info);
4213 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4214 mParameters);
4215 if (rc < 0) {
4216 LOGE("set_parms for unconfigure failed");
4217 pthread_mutex_unlock(&mMutex);
4218 return rc;
4219 }
4220 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004221 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004222 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004223 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004224 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225 property_get("persist.camera.is_type", is_type_value, "4");
4226 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4227 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4228 property_get("persist.camera.is_type_preview", is_type_value, "4");
4229 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4230 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004231
4232 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4233 int32_t hal_version = CAM_HAL_V3;
4234 uint8_t captureIntent =
4235 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4236 mCaptureIntent = captureIntent;
4237 clear_metadata_buffer(mParameters);
4238 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4239 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4240 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004241 if (mFirstConfiguration) {
4242 // configure instant AEC
4243 // Instant AEC is a session based parameter and it is needed only
4244 // once per complete session after open camera.
4245 // i.e. This is set only once for the first capture request, after open camera.
4246 setInstantAEC(meta);
4247 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004248 uint8_t fwkVideoStabMode=0;
4249 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4250 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4251 }
4252
4253 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4254 // turn it on for video/preview
4255 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4256 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004257 int32_t vsMode;
4258 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4260 rc = BAD_VALUE;
4261 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004262 LOGD("setEis %d", setEis);
4263 bool eis3Supported = false;
4264 size_t count = IS_TYPE_MAX;
4265 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4266 for (size_t i = 0; i < count; i++) {
4267 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4268 eis3Supported = true;
4269 break;
4270 }
4271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004272
4273 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004274 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004275 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4276 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004277 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4278 is_type = isTypePreview;
4279 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4280 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4281 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004282 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004283 } else {
4284 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004285 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004286 } else {
4287 is_type = IS_TYPE_NONE;
4288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004289 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004290 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004291 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4292 }
4293 }
4294
4295 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4296 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4297
4298 int32_t tintless_value = 1;
4299 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4300 CAM_INTF_PARM_TINTLESS, tintless_value);
4301 //Disable CDS for HFR mode or if DIS/EIS is on.
4302 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4303 //after every configure_stream
4304 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4305 (m_bIsVideo)) {
4306 int32_t cds = CAM_CDS_MODE_OFF;
4307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4308 CAM_INTF_PARM_CDS_MODE, cds))
4309 LOGE("Failed to disable CDS for HFR mode");
4310
4311 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004312
4313 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4314 uint8_t* use_av_timer = NULL;
4315
4316 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004317 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004318 use_av_timer = &m_debug_avtimer;
4319 }
4320 else{
4321 use_av_timer =
4322 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004323 if (use_av_timer) {
4324 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4325 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004326 }
4327
4328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4329 rc = BAD_VALUE;
4330 }
4331 }
4332
Thierry Strudel3d639192016-09-09 11:52:26 -07004333 setMobicat();
4334
4335 /* Set fps and hfr mode while sending meta stream info so that sensor
4336 * can configure appropriate streaming mode */
4337 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004338 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4339 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004340 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4341 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004342 if (rc == NO_ERROR) {
4343 int32_t max_fps =
4344 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004345 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004346 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4347 }
4348 /* For HFR, more buffers are dequeued upfront to improve the performance */
4349 if (mBatchSize) {
4350 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4351 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4352 }
4353 }
4354 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004355 LOGE("setHalFpsRange failed");
4356 }
4357 }
4358 if (meta.exists(ANDROID_CONTROL_MODE)) {
4359 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4360 rc = extractSceneMode(meta, metaMode, mParameters);
4361 if (rc != NO_ERROR) {
4362 LOGE("extractSceneMode failed");
4363 }
4364 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004365 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004366
Thierry Strudel04e026f2016-10-10 11:27:36 -07004367 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4368 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4369 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4370 rc = setVideoHdrMode(mParameters, vhdr);
4371 if (rc != NO_ERROR) {
4372 LOGE("setVideoHDR is failed");
4373 }
4374 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004375
Thierry Strudel3d639192016-09-09 11:52:26 -07004376 //TODO: validate the arguments, HSV scenemode should have only the
4377 //advertised fps ranges
4378
4379 /*set the capture intent, hal version, tintless, stream info,
4380 *and disable parameters to the backend*/
4381 LOGD("set_parms META_STREAM_INFO " );
4382 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4383 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004384 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 mStreamConfigInfo.type[i],
4386 mStreamConfigInfo.stream_sizes[i].width,
4387 mStreamConfigInfo.stream_sizes[i].height,
4388 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004389 mStreamConfigInfo.format[i],
4390 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004391 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004392
Thierry Strudel3d639192016-09-09 11:52:26 -07004393 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4394 mParameters);
4395 if (rc < 0) {
4396 LOGE("set_parms failed for hal version, stream info");
4397 }
4398
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004399 cam_sensor_mode_info_t sensor_mode_info;
4400 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4401 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004402 if (rc != NO_ERROR) {
4403 LOGE("Failed to get sensor output size");
4404 pthread_mutex_unlock(&mMutex);
4405 goto error_exit;
4406 }
4407
4408 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4409 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004410 sensor_mode_info.active_array_size.width,
4411 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004412
4413 /* Set batchmode before initializing channel. Since registerBuffer
4414 * internally initializes some of the channels, better set batchmode
4415 * even before first register buffer */
4416 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4417 it != mStreamInfo.end(); it++) {
4418 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4419 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4420 && mBatchSize) {
4421 rc = channel->setBatchSize(mBatchSize);
4422 //Disable per frame map unmap for HFR/batchmode case
4423 rc |= channel->setPerFrameMapUnmap(false);
4424 if (NO_ERROR != rc) {
4425 LOGE("Channel init failed %d", rc);
4426 pthread_mutex_unlock(&mMutex);
4427 goto error_exit;
4428 }
4429 }
4430 }
4431
4432 //First initialize all streams
4433 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4434 it != mStreamInfo.end(); it++) {
4435 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4436 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4437 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004438 setEis) {
4439 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4440 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4441 is_type = mStreamConfigInfo.is_type[i];
4442 break;
4443 }
4444 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004445 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004446 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004447 rc = channel->initialize(IS_TYPE_NONE);
4448 }
4449 if (NO_ERROR != rc) {
4450 LOGE("Channel initialization failed %d", rc);
4451 pthread_mutex_unlock(&mMutex);
4452 goto error_exit;
4453 }
4454 }
4455
4456 if (mRawDumpChannel) {
4457 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4458 if (rc != NO_ERROR) {
4459 LOGE("Error: Raw Dump Channel init failed");
4460 pthread_mutex_unlock(&mMutex);
4461 goto error_exit;
4462 }
4463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004464 if (mHdrPlusRawSrcChannel) {
4465 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4466 if (rc != NO_ERROR) {
4467 LOGE("Error: HDR+ RAW Source Channel init failed");
4468 pthread_mutex_unlock(&mMutex);
4469 goto error_exit;
4470 }
4471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004472 if (mSupportChannel) {
4473 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4474 if (rc < 0) {
4475 LOGE("Support channel initialization failed");
4476 pthread_mutex_unlock(&mMutex);
4477 goto error_exit;
4478 }
4479 }
4480 if (mAnalysisChannel) {
4481 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4482 if (rc < 0) {
4483 LOGE("Analysis channel initialization failed");
4484 pthread_mutex_unlock(&mMutex);
4485 goto error_exit;
4486 }
4487 }
4488 if (mDummyBatchChannel) {
4489 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4490 if (rc < 0) {
4491 LOGE("mDummyBatchChannel setBatchSize failed");
4492 pthread_mutex_unlock(&mMutex);
4493 goto error_exit;
4494 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004495 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004496 if (rc < 0) {
4497 LOGE("mDummyBatchChannel initialization failed");
4498 pthread_mutex_unlock(&mMutex);
4499 goto error_exit;
4500 }
4501 }
4502
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004503 if (mHdrPlusClient != nullptr) {
4504 pbcamera::InputConfiguration inputConfig;
4505 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
4506
4507 // Configure HDR+ client streams.
4508 // Get input config.
4509 if (mHdrPlusRawSrcChannel) {
4510 // HDR+ input buffers will be provided by HAL.
4511 rc = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
4512 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
4513 if (rc != OK) {
4514 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream.",
4515 __FUNCTION__);
4516 pthread_mutex_unlock(&mMutex);
4517 goto error_exit;
4518 }
4519
4520 inputConfig.isSensorInput = false;
4521 } else {
4522 // Sensor MIPI will send data to Easel.
4523 inputConfig.isSensorInput = true;
4524 inputConfig.sensorMode.pixelArrayWidth =
4525 sensor_mode_info.pixel_array_size.width;
4526 inputConfig.sensorMode.pixelArrayHeight =
4527 sensor_mode_info.pixel_array_size.height;
4528 inputConfig.sensorMode.activeArrayWidth =
4529 sensor_mode_info.active_array_size.width;
4530 inputConfig.sensorMode.activeArrayHeight =
4531 sensor_mode_info.active_array_size.height;
4532 inputConfig.sensorMode.outputPixelClkHz =
4533 sensor_mode_info.op_pixel_clk;
4534 }
4535
4536 // Get output configurations.
4537 // Easel may need to output RAW16 buffers if mRawChannel was created.
4538 if (mRawChannel != nullptr) {
4539 pbcamera::StreamConfiguration outputConfig;
4540 rc = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
4541 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
4542 if (rc != OK) {
4543 LOGE("%s: Failed to get fill stream config for raw stream.", __FUNCTION__);
4544 pthread_mutex_unlock(&mMutex);
4545 goto error_exit;
4546 }
4547 outputStreamConfigs.push_back(outputConfig);
4548 }
4549
4550 // Easel may need to output YUV output buffers if mPictureChannel was created.
4551 if (mPictureChannel != nullptr) {
4552 pbcamera::StreamConfiguration outputConfig;
4553 rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
4554 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
4555 if (rc != OK) {
4556 LOGE("%s: Failed to get fill stream config for YUV stream.", __FUNCTION__);
4557 pthread_mutex_unlock(&mMutex);
4558 goto error_exit;
4559 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004560
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004561 outputStreamConfigs.push_back(outputConfig);
4562 }
4563
4564 // TODO: consider other channels for YUV output buffers.
4565
4566 rc = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
4567 if (rc != OK) {
4568 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
4569 strerror(-rc), rc);
4570 pthread_mutex_unlock(&mMutex);
4571 goto error_exit;
4572 }
4573 }
4574
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 // Set bundle info
4576 rc = setBundleInfo();
4577 if (rc < 0) {
4578 LOGE("setBundleInfo failed %d", rc);
4579 pthread_mutex_unlock(&mMutex);
4580 goto error_exit;
4581 }
4582
4583 //update settings from app here
4584 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4585 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4586 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4587 }
4588 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4589 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4590 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4591 }
4592 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4593 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4594 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4595
4596 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4597 (mLinkedCameraId != mCameraId) ) {
4598 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4599 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004600 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004601 goto error_exit;
4602 }
4603 }
4604
4605 // add bundle related cameras
4606 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4607 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004608 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4609 &m_pDualCamCmdPtr->bundle_info;
4610 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 if (mIsDeviceLinked)
4612 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4613 else
4614 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4615
4616 pthread_mutex_lock(&gCamLock);
4617
4618 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4619 LOGE("Dualcam: Invalid Session Id ");
4620 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004621 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004622 goto error_exit;
4623 }
4624
4625 if (mIsMainCamera == 1) {
4626 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4627 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004628 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004629 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004630 // related session id should be session id of linked session
4631 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4632 } else {
4633 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4634 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004635 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004636 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004637 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4638 }
4639 pthread_mutex_unlock(&gCamLock);
4640
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004641 rc = mCameraHandle->ops->set_dual_cam_cmd(
4642 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004643 if (rc < 0) {
4644 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004645 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004646 goto error_exit;
4647 }
4648 }
4649
4650 //Then start them.
4651 LOGH("Start META Channel");
4652 rc = mMetadataChannel->start();
4653 if (rc < 0) {
4654 LOGE("META channel start failed");
4655 pthread_mutex_unlock(&mMutex);
4656 goto error_exit;
4657 }
4658
4659 if (mAnalysisChannel) {
4660 rc = mAnalysisChannel->start();
4661 if (rc < 0) {
4662 LOGE("Analysis channel start failed");
4663 mMetadataChannel->stop();
4664 pthread_mutex_unlock(&mMutex);
4665 goto error_exit;
4666 }
4667 }
4668
4669 if (mSupportChannel) {
4670 rc = mSupportChannel->start();
4671 if (rc < 0) {
4672 LOGE("Support channel start failed");
4673 mMetadataChannel->stop();
4674 /* Although support and analysis are mutually exclusive today
4675 adding it in any case for future proofing */
4676 if (mAnalysisChannel) {
4677 mAnalysisChannel->stop();
4678 }
4679 pthread_mutex_unlock(&mMutex);
4680 goto error_exit;
4681 }
4682 }
4683 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4684 it != mStreamInfo.end(); it++) {
4685 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4686 LOGH("Start Processing Channel mask=%d",
4687 channel->getStreamTypeMask());
4688 rc = channel->start();
4689 if (rc < 0) {
4690 LOGE("channel start failed");
4691 pthread_mutex_unlock(&mMutex);
4692 goto error_exit;
4693 }
4694 }
4695
4696 if (mRawDumpChannel) {
4697 LOGD("Starting raw dump stream");
4698 rc = mRawDumpChannel->start();
4699 if (rc != NO_ERROR) {
4700 LOGE("Error Starting Raw Dump Channel");
4701 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4702 it != mStreamInfo.end(); it++) {
4703 QCamera3Channel *channel =
4704 (QCamera3Channel *)(*it)->stream->priv;
4705 LOGH("Stopping Processing Channel mask=%d",
4706 channel->getStreamTypeMask());
4707 channel->stop();
4708 }
4709 if (mSupportChannel)
4710 mSupportChannel->stop();
4711 if (mAnalysisChannel) {
4712 mAnalysisChannel->stop();
4713 }
4714 mMetadataChannel->stop();
4715 pthread_mutex_unlock(&mMutex);
4716 goto error_exit;
4717 }
4718 }
4719
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004720 if (mHdrPlusRawSrcChannel) {
4721 LOGD("Starting HDR+ RAW stream");
4722 rc = mHdrPlusRawSrcChannel->start();
4723 if (rc != NO_ERROR) {
4724 LOGE("Error Starting HDR+ RAW Channel");
4725 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4726 it != mStreamInfo.end(); it++) {
4727 QCamera3Channel *channel =
4728 (QCamera3Channel *)(*it)->stream->priv;
4729 LOGH("Stopping Processing Channel mask=%d",
4730 channel->getStreamTypeMask());
4731 channel->stop();
4732 }
4733 if (mSupportChannel)
4734 mSupportChannel->stop();
4735 if (mAnalysisChannel) {
4736 mAnalysisChannel->stop();
4737 }
4738 if (mRawDumpChannel) {
4739 mRawDumpChannel->stop();
4740 }
4741 mMetadataChannel->stop();
4742 pthread_mutex_unlock(&mMutex);
4743 goto error_exit;
4744 }
4745 }
4746
Thierry Strudel3d639192016-09-09 11:52:26 -07004747 if (mChannelHandle) {
4748
4749 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4750 mChannelHandle);
4751 if (rc != NO_ERROR) {
4752 LOGE("start_channel failed %d", rc);
4753 pthread_mutex_unlock(&mMutex);
4754 goto error_exit;
4755 }
4756 }
4757
4758 goto no_error;
4759error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004760 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004761 return rc;
4762no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 mWokenUpByDaemon = false;
4764 mPendingLiveRequest = 0;
4765 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 }
4767
4768 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004769 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004770
4771 if (mFlushPerf) {
4772 //we cannot accept any requests during flush
4773 LOGE("process_capture_request cannot proceed during flush");
4774 pthread_mutex_unlock(&mMutex);
4775 return NO_ERROR; //should return an error
4776 }
4777
4778 if (meta.exists(ANDROID_REQUEST_ID)) {
4779 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4780 mCurrentRequestId = request_id;
4781 LOGD("Received request with id: %d", request_id);
4782 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4783 LOGE("Unable to find request id field, \
4784 & no previous id available");
4785 pthread_mutex_unlock(&mMutex);
4786 return NAME_NOT_FOUND;
4787 } else {
4788 LOGD("Re-using old request id");
4789 request_id = mCurrentRequestId;
4790 }
4791
4792 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4793 request->num_output_buffers,
4794 request->input_buffer,
4795 frameNumber);
4796 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004797 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 int blob_request = 0;
4799 uint32_t snapshotStreamId = 0;
4800 for (size_t i = 0; i < request->num_output_buffers; i++) {
4801 const camera3_stream_buffer_t& output = request->output_buffers[i];
4802 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4803
4804 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 blob_request = 1;
4807 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4808 }
4809
4810 if (output.acquire_fence != -1) {
4811 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4812 close(output.acquire_fence);
4813 if (rc != OK) {
4814 LOGE("sync wait failed %d", rc);
4815 pthread_mutex_unlock(&mMutex);
4816 return rc;
4817 }
4818 }
4819
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004820 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004821 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004822
4823 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4824 isVidBufRequested = true;
4825 }
4826 }
4827
    //FIXME: Add checks to ensure no dups in validateCaptureRequest
4829 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4830 itr++) {
4831 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4832 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4833 channel->getStreamID(channel->getStreamTypeMask());
4834
4835 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4836 isVidBufRequested = true;
4837 }
4838 }
4839
Thierry Strudel3d639192016-09-09 11:52:26 -07004840 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004841 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004842 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 }
4844 if (blob_request && mRawDumpChannel) {
4845 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004846 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004848 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 }
4850
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004851 {
4852 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4853 // Request a RAW buffer if
4854 // 1. mHdrPlusRawSrcChannel is valid.
4855 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
4856 // 3. There is no pending HDR+ request.
4857 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
4858 mHdrPlusPendingRequests.size() == 0) {
4859 streamsArray.stream_request[streamsArray.num_streams].streamID =
4860 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4861 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4862 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004863 }
4864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004865 //extract capture intent
4866 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4867 mCaptureIntent =
4868 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4869 }
4870
4871 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4872 mCacMode =
4873 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4874 }
4875
4876 bool hdrPlusRequest = false;
4877
4878 // Decide if this is an HDR+ capture request.
4879 if (mHdrPlusClient != nullptr &&
4880 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
4881 bool highQualityPostProcessing = true;
4882
4883 // Check noise reduction mode is high quality.
4884 if (!meta.exists(ANDROID_NOISE_REDUCTION_MODE) ||
4885 meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
4886 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
4887 highQualityPostProcessing = false;
4888 }
4889
4890 // Check edge mode is high quality.
4891 if (!meta.exists(ANDROID_EDGE_MODE) ||
4892 meta.find(ANDROID_EDGE_MODE).data.u8[0] !=
4893 ANDROID_EDGE_MODE_HIGH_QUALITY) {
4894 highQualityPostProcessing = false;
4895 }
4896
4897 // If all post processing is high quality, this still capture request is an HDR+ request.
4898 // TODO: support more than a single JPEG output buffer.
4899 if (highQualityPostProcessing && request->num_output_buffers == 1 &&
4900 request->output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB) {
4901 auto frame = std::make_shared<mm_camera_buf_def_t>();
4902
4903 // Get a YUV buffer from pic channel.
4904 QCamera3PicChannel *picChannel =
4905 (QCamera3PicChannel*)request->output_buffers[0].stream->priv;
4906 rc = picChannel->getYuvBufferForRequest(frame.get(), frameNumber);
4907 if (rc != OK) {
4908 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
4909 __FUNCTION__, strerror(-rc), rc);
4910 pthread_mutex_unlock(&mMutex);
4911 return rc;
4912 }
4913
4914 pbcamera::StreamBuffer buffer;
4915 buffer.streamId = kPbYuvOutputStreamId;
4916 buffer.data = frame->buffer;
4917 buffer.dataSize = frame->frame_len;
4918
4919 pbcamera::CaptureRequest pbRequest;
4920 pbRequest.id = frameNumber;
4921 pbRequest.outputBuffers.push_back(buffer);
4922
4923 // Submit an HDR+ capture request to HDR+ service.
4924 rc = mHdrPlusClient->submitCaptureRequest(&pbRequest);
4925 if (rc != OK) {
4926 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__,
4927 __LINE__, strerror(-rc), rc);
4928 }
4929
4930 hdrPlusRequest = true;
4931
4932 HdrPlusPendingRequest pendingHdrPlusRequest = {};
4933 pendingHdrPlusRequest.yuvBuffer = frame;
4934 pendingHdrPlusRequest.frameworkOutputBuffers.push_back(request->output_buffers[0]);
4935 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
4936 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
4937
4938 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4939 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
4940
4941 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
4942 } else {
4943 ALOGD("%s: Fall back to non HDR+ capture request. high quality: %d, number of "
4944 "output buffers: %d", __FUNCTION__, highQualityPostProcessing,
4945 request->num_output_buffers);
4946 }
4947 }
4948
4949 if(request->input_buffer == NULL && !hdrPlusRequest) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004950 /* Parse the settings:
4951 * - For every request in NORMAL MODE
4952 * - For every request in HFR mode during preview only case
4953 * - For first request of every batch in HFR mode during video
4954 * recording. In batchmode the same settings except frame number is
4955 * repeated in each request of the batch.
4956 */
4957 if (!mBatchSize ||
4958 (mBatchSize && !isVidBufRequested) ||
4959 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004960 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004961 if (rc < 0) {
4962 LOGE("fail to set frame parameters");
4963 pthread_mutex_unlock(&mMutex);
4964 return rc;
4965 }
4966 }
4967 /* For batchMode HFR, setFrameParameters is not called for every
4968 * request. But only frame number of the latest request is parsed.
4969 * Keep track of first and last frame numbers in a batch so that
4970 * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
4972 if (mBatchSize) {
4973 if (!mToBeQueuedVidBufs) {
4974 //start of the batch
4975 mFirstFrameNumberInBatch = request->frame_number;
4976 }
4977 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4978 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4979 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 return BAD_VALUE;
4982 }
4983 }
4984 if (mNeedSensorRestart) {
4985 /* Unlock the mutex as restartSensor waits on the channels to be
4986 * stopped, which in turn calls stream callback functions -
4987 * handleBufferWithLock and handleMetadataWithLock */
4988 pthread_mutex_unlock(&mMutex);
4989 rc = dynamicUpdateMetaStreamInfo();
4990 if (rc != NO_ERROR) {
4991 LOGE("Restarting the sensor failed");
4992 return BAD_VALUE;
4993 }
4994 mNeedSensorRestart = false;
4995 pthread_mutex_lock(&mMutex);
4996 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004997 if(mResetInstantAEC) {
4998 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4999 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5000 mResetInstantAEC = false;
5001 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005002 } else if (!hdrPlusRequest) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005003
5004 if (request->input_buffer->acquire_fence != -1) {
5005 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5006 close(request->input_buffer->acquire_fence);
5007 if (rc != OK) {
5008 LOGE("input buffer sync wait failed %d", rc);
5009 pthread_mutex_unlock(&mMutex);
5010 return rc;
5011 }
5012 }
5013 }
5014
5015 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5016 mLastCustIntentFrmNum = frameNumber;
5017 }
5018 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005019 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 pendingRequestIterator latestRequest;
5021 pendingRequest.frame_number = frameNumber;
5022 pendingRequest.num_buffers = request->num_output_buffers;
5023 pendingRequest.request_id = request_id;
5024 pendingRequest.blob_request = blob_request;
5025 pendingRequest.timestamp = 0;
5026 pendingRequest.bUrgentReceived = 0;
5027 if (request->input_buffer) {
5028 pendingRequest.input_buffer =
5029 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5030 *(pendingRequest.input_buffer) = *(request->input_buffer);
5031 pInputBuffer = pendingRequest.input_buffer;
5032 } else {
5033 pendingRequest.input_buffer = NULL;
5034 pInputBuffer = NULL;
5035 }
5036
5037 pendingRequest.pipeline_depth = 0;
5038 pendingRequest.partial_result_cnt = 0;
5039 extractJpegMetadata(mCurJpegMeta, request);
5040 pendingRequest.jpegMetadata = mCurJpegMeta;
5041 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5042 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005043 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08005044 /* DevCamDebug metadata processCaptureRequest */
5045 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5046 mDevCamDebugMetaEnable =
5047 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5048 }
5049 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5050 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005051
5052 //extract CAC info
5053 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5054 mCacMode =
5055 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5056 }
5057 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005058 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005059
5060 PendingBuffersInRequest bufsForCurRequest;
5061 bufsForCurRequest.frame_number = frameNumber;
5062 // Mark current timestamp for the new request
5063 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005064 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005065
5066 for (size_t i = 0; i < request->num_output_buffers; i++) {
5067 RequestedBufferInfo requestedBuf;
5068 memset(&requestedBuf, 0, sizeof(requestedBuf));
5069 requestedBuf.stream = request->output_buffers[i].stream;
5070 requestedBuf.buffer = NULL;
5071 pendingRequest.buffers.push_back(requestedBuf);
5072
5073 // Add to buffer handle the pending buffers list
5074 PendingBufferInfo bufferInfo;
5075 bufferInfo.buffer = request->output_buffers[i].buffer;
5076 bufferInfo.stream = request->output_buffers[i].stream;
5077 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5078 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5079 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5080 frameNumber, bufferInfo.buffer,
5081 channel->getStreamTypeMask(), bufferInfo.stream->format);
5082 }
5083 // Add this request packet into mPendingBuffersMap
5084 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5085 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5086 mPendingBuffersMap.get_num_overall_buffers());
5087
5088 latestRequest = mPendingRequestsList.insert(
5089 mPendingRequestsList.end(), pendingRequest);
5090 if(mFlush) {
5091 LOGI("mFlush is true");
5092 pthread_mutex_unlock(&mMutex);
5093 return NO_ERROR;
5094 }
5095
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005096 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5097 // channel.
5098 if (!hdrPlusRequest) {
5099 int indexUsed;
5100 // Notify metadata channel we receive a request
5101 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005102
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005103 if(request->input_buffer != NULL){
5104 LOGD("Input request, frame_number %d", frameNumber);
5105 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5106 if (NO_ERROR != rc) {
5107 LOGE("fail to set reproc parameters");
5108 pthread_mutex_unlock(&mMutex);
5109 return rc;
5110 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005111 }
5112
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005113 // Call request on other streams
5114 uint32_t streams_need_metadata = 0;
5115 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5116 for (size_t i = 0; i < request->num_output_buffers; i++) {
5117 const camera3_stream_buffer_t& output = request->output_buffers[i];
5118 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5119
5120 if (channel == NULL) {
5121 LOGW("invalid channel pointer for stream");
5122 continue;
5123 }
5124
5125 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5126 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5127 output.buffer, request->input_buffer, frameNumber);
5128 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005130 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5131 if (rc < 0) {
5132 LOGE("Fail to request on picture channel");
5133 pthread_mutex_unlock(&mMutex);
5134 return rc;
5135 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005136 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005137 LOGD("snapshot request with buffer %p, frame_number %d",
5138 output.buffer, frameNumber);
5139 if (!request->settings) {
5140 rc = channel->request(output.buffer, frameNumber,
5141 NULL, mPrevParameters, indexUsed);
5142 } else {
5143 rc = channel->request(output.buffer, frameNumber,
5144 NULL, mParameters, indexUsed);
5145 }
5146 if (rc < 0) {
5147 LOGE("Fail to request on picture channel");
5148 pthread_mutex_unlock(&mMutex);
5149 return rc;
5150 }
5151
5152 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5153 uint32_t j = 0;
5154 for (j = 0; j < streamsArray.num_streams; j++) {
5155 if (streamsArray.stream_request[j].streamID == streamId) {
5156 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5157 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5158 else
5159 streamsArray.stream_request[j].buf_index = indexUsed;
5160 break;
5161 }
5162 }
5163 if (j == streamsArray.num_streams) {
5164 LOGE("Did not find matching stream to update index");
5165 assert(0);
5166 }
5167
5168 pendingBufferIter->need_metadata = true;
5169 streams_need_metadata++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005170 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005171 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5172 bool needMetadata = false;
5173 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5174 rc = yuvChannel->request(output.buffer, frameNumber,
5175 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5176 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005178 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 pthread_mutex_unlock(&mMutex);
5180 return rc;
5181 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005182
5183 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5184 uint32_t j = 0;
5185 for (j = 0; j < streamsArray.num_streams; j++) {
5186 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005187 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5188 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5189 else
5190 streamsArray.stream_request[j].buf_index = indexUsed;
5191 break;
5192 }
5193 }
5194 if (j == streamsArray.num_streams) {
5195 LOGE("Did not find matching stream to update index");
5196 assert(0);
5197 }
5198
5199 pendingBufferIter->need_metadata = needMetadata;
5200 if (needMetadata)
5201 streams_need_metadata += 1;
5202 LOGD("calling YUV channel request, need_metadata is %d",
5203 needMetadata);
5204 } else {
5205 LOGD("request with buffer %p, frame_number %d",
5206 output.buffer, frameNumber);
5207
5208 rc = channel->request(output.buffer, frameNumber, indexUsed);
5209
5210 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5211 uint32_t j = 0;
5212 for (j = 0; j < streamsArray.num_streams; j++) {
5213 if (streamsArray.stream_request[j].streamID == streamId) {
5214 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5215 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5216 else
5217 streamsArray.stream_request[j].buf_index = indexUsed;
5218 break;
5219 }
5220 }
5221 if (j == streamsArray.num_streams) {
5222 LOGE("Did not find matching stream to update index");
5223 assert(0);
5224 }
5225
5226 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5227 && mBatchSize) {
5228 mToBeQueuedVidBufs++;
5229 if (mToBeQueuedVidBufs == mBatchSize) {
5230 channel->queueBatchBuf();
5231 }
5232 }
5233 if (rc < 0) {
5234 LOGE("request failed");
5235 pthread_mutex_unlock(&mMutex);
5236 return rc;
5237 }
5238 }
5239 pendingBufferIter++;
5240 }
5241
5242 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5243 itr++) {
5244 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5245
5246 if (channel == NULL) {
5247 LOGE("invalid channel pointer for stream");
5248 assert(0);
5249 return BAD_VALUE;
5250 }
5251
5252 InternalRequest requestedStream;
5253 requestedStream = (*itr);
5254
5255
5256 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5257 LOGD("snapshot request internally input buffer %p, frame_number %d",
5258 request->input_buffer, frameNumber);
5259 if(request->input_buffer != NULL){
5260 rc = channel->request(NULL, frameNumber,
5261 pInputBuffer, &mReprocMeta, indexUsed, true,
5262 requestedStream.meteringOnly);
5263 if (rc < 0) {
5264 LOGE("Fail to request on picture channel");
5265 pthread_mutex_unlock(&mMutex);
5266 return rc;
5267 }
5268 } else {
5269 LOGD("snapshot request with frame_number %d", frameNumber);
5270 if (!request->settings) {
5271 rc = channel->request(NULL, frameNumber,
5272 NULL, mPrevParameters, indexUsed, true,
5273 requestedStream.meteringOnly);
5274 } else {
5275 rc = channel->request(NULL, frameNumber,
5276 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5277 }
5278 if (rc < 0) {
5279 LOGE("Fail to request on picture channel");
5280 pthread_mutex_unlock(&mMutex);
5281 return rc;
5282 }
5283
5284 if ((*itr).meteringOnly != 1) {
5285 requestedStream.need_metadata = 1;
5286 streams_need_metadata++;
5287 }
5288 }
5289
5290 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5291 uint32_t j = 0;
5292 for (j = 0; j < streamsArray.num_streams; j++) {
5293 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5295 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5296 else
5297 streamsArray.stream_request[j].buf_index = indexUsed;
5298 break;
5299 }
5300 }
5301 if (j == streamsArray.num_streams) {
5302 LOGE("Did not find matching stream to update index");
5303 assert(0);
5304 }
5305
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005306 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005307 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005308 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005309 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005310 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005311 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005312 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005313
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 //If 2 streams have need_metadata set to true, fail the request, unless
5315 //we copy/reference count the metadata buffer
5316 if (streams_need_metadata > 1) {
5317 LOGE("not supporting request in which two streams requires"
5318 " 2 HAL metadata for reprocessing");
5319 pthread_mutex_unlock(&mMutex);
5320 return -EINVAL;
5321 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005322
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005323 if (request->input_buffer == NULL) {
5324 /* Set the parameters to backend:
5325 * - For every request in NORMAL MODE
5326 * - For every request in HFR mode during preview only case
5327 * - Once every batch in HFR mode during video recording
5328 */
5329 if (!mBatchSize ||
5330 (mBatchSize && !isVidBufRequested) ||
5331 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5332 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5333 mBatchSize, isVidBufRequested,
5334 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005336 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5337 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5338 uint32_t m = 0;
5339 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5340 if (streamsArray.stream_request[k].streamID ==
5341 mBatchedStreamsArray.stream_request[m].streamID)
5342 break;
5343 }
5344 if (m == mBatchedStreamsArray.num_streams) {
5345 mBatchedStreamsArray.stream_request\
5346 [mBatchedStreamsArray.num_streams].streamID =
5347 streamsArray.stream_request[k].streamID;
5348 mBatchedStreamsArray.stream_request\
5349 [mBatchedStreamsArray.num_streams].buf_index =
5350 streamsArray.stream_request[k].buf_index;
5351 mBatchedStreamsArray.num_streams =
5352 mBatchedStreamsArray.num_streams + 1;
5353 }
5354 }
5355 streamsArray = mBatchedStreamsArray;
5356 }
5357 /* Update stream id of all the requested buffers */
5358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5359 streamsArray)) {
5360 LOGE("Failed to set stream type mask in the parameters");
5361 return BAD_VALUE;
5362 }
5363
5364 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5365 mParameters);
5366 if (rc < 0) {
5367 LOGE("set_parms failed");
5368 }
5369 /* reset to zero coz, the batch is queued */
5370 mToBeQueuedVidBufs = 0;
5371 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5372 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5373 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005374 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5375 uint32_t m = 0;
5376 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5377 if (streamsArray.stream_request[k].streamID ==
5378 mBatchedStreamsArray.stream_request[m].streamID)
5379 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005380 }
5381 if (m == mBatchedStreamsArray.num_streams) {
5382 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5383 streamID = streamsArray.stream_request[k].streamID;
5384 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5385 buf_index = streamsArray.stream_request[k].buf_index;
5386 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5387 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005388 }
5389 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005392 }
5393
5394 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5395
5396 mState = STARTED;
5397 // Added a timed condition wait
5398 struct timespec ts;
5399 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005400 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 if (rc < 0) {
5402 isValidTimeout = 0;
5403 LOGE("Error reading the real time clock!!");
5404 }
5405 else {
5406 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005407 int64_t timeout = 5;
5408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // If there is a pending HDR+ request, the following requests may be blocked until the
5411 // HDR+ request is done. So allow a longer timeout.
5412 if (mHdrPlusPendingRequests.size() > 0) {
5413 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5414 }
5415 }
5416 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005417 }
5418 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005419 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 (mState != ERROR) && (mState != DEINIT)) {
5421 if (!isValidTimeout) {
5422 LOGD("Blocking on conditional wait");
5423 pthread_cond_wait(&mRequestCond, &mMutex);
5424 }
5425 else {
5426 LOGD("Blocking on timed conditional wait");
5427 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5428 if (rc == ETIMEDOUT) {
5429 rc = -ENODEV;
5430 LOGE("Unblocked on timeout!!!!");
5431 break;
5432 }
5433 }
5434 LOGD("Unblocked");
5435 if (mWokenUpByDaemon) {
5436 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005437 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005438 break;
5439 }
5440 }
5441 pthread_mutex_unlock(&mMutex);
5442
5443 return rc;
5444}
5445
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dump the HAL's pending state (pending capture requests,
 *              pending framework buffers and the pending frame-drop list)
 *              to the supplied file descriptor, typically triggered via
 *              "dumpsys media.camera". Also arms mUpdateDebugLevel so debug
 *              levels are refreshed on the next request.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the textual dump into
 *
 * RETURN     : None
 *==========================================================================*/
5456void QCamera3HardwareInterface::dump(int fd)
5457{
5458 pthread_mutex_lock(&mMutex);
5459 dprintf(fd, "\n Camera HAL3 information Begin \n");
5460
5461 dprintf(fd, "\nNumber of pending requests: %zu \n",
5462 mPendingRequestsList.size());
5463 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5464 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5465 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5466 for(pendingRequestIterator i = mPendingRequestsList.begin();
5467 i != mPendingRequestsList.end(); i++) {
5468 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5469 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5470 i->input_buffer);
5471 }
5472 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5473 mPendingBuffersMap.get_num_overall_buffers());
5474 dprintf(fd, "-------+------------------\n");
5475 dprintf(fd, " Frame | Stream type mask \n");
5476 dprintf(fd, "-------+------------------\n");
5477 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5478 for(auto &j : req.mPendingBufferList) {
5479 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5480 dprintf(fd, " %5d | %11d \n",
5481 req.frame_number, channel->getStreamTypeMask());
5482 }
5483 }
5484 dprintf(fd, "-------+------------------\n");
5485
5486 dprintf(fd, "\nPending frame drop list: %zu\n",
5487 mPendingFrameDropList.size());
5488 dprintf(fd, "-------+-----------\n");
5489 dprintf(fd, " Frame | Stream ID \n");
5490 dprintf(fd, "-------+-----------\n");
5491 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5492 i != mPendingFrameDropList.end(); i++) {
5493 dprintf(fd, " %5d | %9d \n",
5494 i->frame_number, i->stream_ID);
5495 }
5496 dprintf(fd, "-------+-----------\n");
5497
5498 dprintf(fd, "\n Camera HAL3 information End \n");
5499
5500 /* use dumpsys media.camera as trigger to send update debug level event */
5501 mUpdateDebugLevel = true;
5502 pthread_mutex_unlock(&mMutex);
5503 return;
5504}
5505
5506/*===========================================================================
5507 * FUNCTION : flush
5508 *
5509 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5510 * conditionally restarts channels
5511 *
5512 * PARAMETERS :
5513 * @ restartChannels: re-start all channels
5514 *
5515 *
5516 * RETURN :
5517 * 0 on success
5518 * Error code on failure
5519 *==========================================================================*/
5520int QCamera3HardwareInterface::flush(bool restartChannels)
5521{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005522 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005523 int32_t rc = NO_ERROR;
5524
5525 LOGD("Unblocking Process Capture Request");
5526 pthread_mutex_lock(&mMutex);
5527 mFlush = true;
5528 pthread_mutex_unlock(&mMutex);
5529
5530 rc = stopAllChannels();
5531 // unlink of dualcam
5532 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005533 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5534 &m_pDualCamCmdPtr->bundle_info;
5535 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005536 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5537 pthread_mutex_lock(&gCamLock);
5538
5539 if (mIsMainCamera == 1) {
5540 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5541 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005542 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005543 // related session id should be session id of linked session
5544 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5545 } else {
5546 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5547 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005548 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5550 }
5551 pthread_mutex_unlock(&gCamLock);
5552
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005553 rc = mCameraHandle->ops->set_dual_cam_cmd(
5554 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005555 if (rc < 0) {
5556 LOGE("Dualcam: Unlink failed, but still proceed to close");
5557 }
5558 }
5559
5560 if (rc < 0) {
5561 LOGE("stopAllChannels failed");
5562 return rc;
5563 }
5564 if (mChannelHandle) {
5565 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5566 mChannelHandle);
5567 }
5568
5569 // Reset bundle info
5570 rc = setBundleInfo();
5571 if (rc < 0) {
5572 LOGE("setBundleInfo failed %d", rc);
5573 return rc;
5574 }
5575
5576 // Mutex Lock
5577 pthread_mutex_lock(&mMutex);
5578
5579 // Unblock process_capture_request
5580 mPendingLiveRequest = 0;
5581 pthread_cond_signal(&mRequestCond);
5582
5583 rc = notifyErrorForPendingRequests();
5584 if (rc < 0) {
5585 LOGE("notifyErrorForPendingRequests failed");
5586 pthread_mutex_unlock(&mMutex);
5587 return rc;
5588 }
5589
5590 mFlush = false;
5591
5592 // Start the Streams/Channels
5593 if (restartChannels) {
5594 rc = startAllChannels();
5595 if (rc < 0) {
5596 LOGE("startAllChannels failed");
5597 pthread_mutex_unlock(&mMutex);
5598 return rc;
5599 }
5600 }
5601
5602 if (mChannelHandle) {
5603 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5604 mChannelHandle);
5605 if (rc < 0) {
5606 LOGE("start_channel failed");
5607 pthread_mutex_unlock(&mMutex);
5608 return rc;
5609 }
5610 }
5611
5612 pthread_mutex_unlock(&mMutex);
5613
5614 return 0;
5615}
5616
5617/*===========================================================================
5618 * FUNCTION : flushPerf
5619 *
5620 * DESCRIPTION: This is the performance optimization version of flush that does
5621 * not use stream off, rather flushes the system
5622 *
5623 * PARAMETERS :
5624 *
5625 *
5626 * RETURN : 0 : success
5627 * -EINVAL: input is malformed (device is not valid)
5628 * -ENODEV: if the device has encountered a serious error
5629 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Mark flush-in-progress and snapshot how many buffers the HAL still owes
    // the framework; all of them must drain before this call can return.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding: the flush is trivially complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // NOTE(review): the deadline is built from CLOCK_MONOTONIC, but
    // pthread_cond_timedwait interprets the timespec against the condvar's
    // configured clock (CLOCK_REALTIME unless pthread_condattr_setclock was
    // used) — presumably mBuffersCond is initialized with a monotonic
    // condattr elsewhere in this file; confirm.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mBuffersCond is signalled as buffers come back; re-check the count each
    // wakeup to guard against spurious wakes. mMutex is released while waiting.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    // Any wait failure — including ETIMEDOUT — is reported as a serious
    // device error, as documented in the function header.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    // NOTE(review): this assignment overwrites any -ENODEV set by a failed
    // channel flush above, contradicting the comment there — if
    // notifyErrorForPendingRequests() succeeds, rc becomes 0. Verify intent.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5730
5731/*===========================================================================
5732 * FUNCTION : handleCameraDeviceError
5733 *
5734 * DESCRIPTION: This function calls internal flush and notifies the error to
5735 * framework and updates the state variable.
5736 *
5737 * PARAMETERS : None
5738 *
5739 * RETURN : NO_ERROR on Success
5740 * Error code on failure
5741 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock serializes this internal flush against a concurrent
        // framework-initiated flush().
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        pthread_mutex_unlock(&mMutex);

        // mMutex must be dropped before calling flush(), which acquires it
        // internally.
        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        // Even if the flush failed, the device is taken out of service.
        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Tell the framework the device is unusable: a device-level error
    // notification with no associated stream and frame_number 0.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5776
5777/*===========================================================================
5778 * FUNCTION : captureResultCb
5779 *
5780 * DESCRIPTION: Callback handler for all capture result
5781 * (streams, as well as metadata)
5782 *
5783 * PARAMETERS :
5784 * @metadata : metadata information
5785 * @buffer : actual gralloc buffer to be returned to frameworks.
5786 * NULL if metadata.
5787 *
5788 * RETURN : NONE
5789 *==========================================================================*/
5790void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5791 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5792{
5793 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005794 pthread_mutex_lock(&mMutex);
5795 uint8_t batchSize = mBatchSize;
5796 pthread_mutex_unlock(&mMutex);
5797 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005798 handleBatchMetadata(metadata_buf,
5799 true /* free_and_bufdone_meta_buf */);
5800 } else { /* mBatchSize = 0 */
5801 hdrPlusPerfLock(metadata_buf);
5802 pthread_mutex_lock(&mMutex);
5803 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005804 true /* free_and_bufdone_meta_buf */,
5805 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005806 pthread_mutex_unlock(&mMutex);
5807 }
5808 } else if (isInputBuffer) {
5809 pthread_mutex_lock(&mMutex);
5810 handleInputBufferWithLock(frame_number);
5811 pthread_mutex_unlock(&mMutex);
5812 } else {
5813 pthread_mutex_lock(&mMutex);
5814 handleBufferWithLock(buffer, frame_number);
5815 pthread_mutex_unlock(&mMutex);
5816 }
5817 return;
5818}
5819
5820/*===========================================================================
5821 * FUNCTION : getReprocessibleOutputStreamId
5822 *
5823 * DESCRIPTION: Get source output stream id for the input reprocess stream
5824 * based on size and format, which would be the largest
5825 * output stream if an input stream exists.
5826 *
5827 * PARAMETERS :
5828 * @id : return the stream id if found
5829 *
5830 * RETURN : int32_t type of status
5831 * NO_ERROR -- success
5832 * none-zero failure code
5833 *==========================================================================*/
5834int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5835{
5836 /* check if any output or bidirectional stream with the same size and format
5837 and return that stream */
5838 if ((mInputStreamInfo.dim.width > 0) &&
5839 (mInputStreamInfo.dim.height > 0)) {
5840 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5841 it != mStreamInfo.end(); it++) {
5842
5843 camera3_stream_t *stream = (*it)->stream;
5844 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5845 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5846 (stream->format == mInputStreamInfo.format)) {
5847 // Usage flag for an input stream and the source output stream
5848 // may be different.
5849 LOGD("Found reprocessible output stream! %p", *it);
5850 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5851 stream->usage, mInputStreamInfo.usage);
5852
5853 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5854 if (channel != NULL && channel->mStreams[0]) {
5855 id = channel->mStreams[0]->getMyServerID();
5856 return NO_ERROR;
5857 }
5858 }
5859 }
5860 } else {
5861 LOGD("No input stream, so no reprocessible output stream");
5862 }
5863 return NAME_NOT_FOUND;
5864}
5865
5866/*===========================================================================
5867 * FUNCTION : lookupFwkName
5868 *
5869 * DESCRIPTION: In case the enum is not same in fwk and backend
5870 * make sure the parameter is correctly propogated
5871 *
5872 * PARAMETERS :
5873 * @arr : map between the two enums
5874 * @len : len of the map
5875 * @hal_name : name of the hal_parm to map
5876 *
5877 * RETURN : int type of status
5878 * fwk_name -- success
5879 * none-zero failure code
5880 *==========================================================================*/
5881template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5882 size_t len, halType hal_name)
5883{
5884
5885 for (size_t i = 0; i < len; i++) {
5886 if (arr[i].hal_name == hal_name) {
5887 return arr[i].fwk_name;
5888 }
5889 }
5890
5891 /* Not able to find matching framework type is not necessarily
5892 * an error case. This happens when mm-camera supports more attributes
5893 * than the frameworks do */
5894 LOGH("Cannot find matching framework type");
5895 return NAME_NOT_FOUND;
5896}
5897
5898/*===========================================================================
5899 * FUNCTION : lookupHalName
5900 *
5901 * DESCRIPTION: In case the enum is not same in fwk and backend
5902 * make sure the parameter is correctly propogated
5903 *
5904 * PARAMETERS :
5905 * @arr : map between the two enums
5906 * @len : len of the map
5907 * @fwk_name : name of the hal_parm to map
5908 *
5909 * RETURN : int32_t type of status
5910 * hal_name -- success
5911 * none-zero failure code
5912 *==========================================================================*/
5913template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5914 size_t len, fwkType fwk_name)
5915{
5916 for (size_t i = 0; i < len; i++) {
5917 if (arr[i].fwk_name == fwk_name) {
5918 return arr[i].hal_name;
5919 }
5920 }
5921
5922 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5923 return NAME_NOT_FOUND;
5924}
5925
5926/*===========================================================================
5927 * FUNCTION : lookupProp
5928 *
5929 * DESCRIPTION: lookup a value by its name
5930 *
5931 * PARAMETERS :
5932 * @arr : map between the two enums
5933 * @len : size of the map
5934 * @name : name to be looked up
5935 *
5936 * RETURN : Value if found
5937 * CAM_CDS_MODE_MAX if not found
5938 *==========================================================================*/
5939template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5940 size_t len, const char *name)
5941{
5942 if (name) {
5943 for (size_t i = 0; i < len; i++) {
5944 if (!strcmp(arr[i].desc, name)) {
5945 return arr[i].val;
5946 }
5947 }
5948 }
5949 return CAM_CDS_MODE_MAX;
5950}
5951
5952/*===========================================================================
5953 *
5954 * DESCRIPTION:
5955 *
5956 * PARAMETERS :
5957 * @metadata : metadata information from callback
5958 * @timestamp: metadata buffer timestamp
5959 * @request_id: request id
5960 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005961 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5962 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005963 * @pprocDone: whether internal offline postprocsesing is done
5964 *
5965 * RETURN : camera_metadata_t*
5966 * metadata in a format specified by fwk
5967 *==========================================================================*/
5968camera_metadata_t*
5969QCamera3HardwareInterface::translateFromHalMetadata(
5970 metadata_buffer_t *metadata,
5971 nsecs_t timestamp,
5972 int32_t request_id,
5973 const CameraMetadata& jpegMetadata,
5974 uint8_t pipeline_depth,
5975 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005976 /* DevCamDebug metadata translateFromHalMetadata argument */
5977 uint8_t DevCamDebug_meta_enable,
5978 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005979 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005980 uint8_t fwk_cacMode,
5981 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005982{
5983 CameraMetadata camMetadata;
5984 camera_metadata_t *resultMetadata;
5985
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005986 if (mBatchSize && !firstMetadataInBatch) {
5987 /* In batch mode, use cached metadata from the first metadata
5988 in the batch */
5989 camMetadata.clear();
5990 camMetadata = mCachedMetadata;
5991 }
5992
Thierry Strudel3d639192016-09-09 11:52:26 -07005993 if (jpegMetadata.entryCount())
5994 camMetadata.append(jpegMetadata);
5995
5996 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5997 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5998 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5999 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006000 if (mBatchSize == 0) {
6001 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6002 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006004
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006005 if (mBatchSize && !firstMetadataInBatch) {
6006 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6007 resultMetadata = camMetadata.release();
6008 return resultMetadata;
6009 }
6010
Samuel Ha68ba5172016-12-15 18:41:12 -08006011 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6012 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6013 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6014 // DevCamDebug metadata translateFromHalMetadata AF
6015 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6016 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6017 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6018 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6019 }
6020 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6021 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6022 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6023 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6024 }
6025 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6026 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6027 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6028 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6029 }
6030 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6031 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6032 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6033 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6034 }
6035 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6036 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6037 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6038 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6039 }
6040 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6041 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6042 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6043 *DevCamDebug_af_monitor_pdaf_target_pos;
6044 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6045 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6046 }
6047 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6048 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6049 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6050 *DevCamDebug_af_monitor_pdaf_confidence;
6051 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6052 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6053 }
6054 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6055 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6056 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6057 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6058 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6059 }
6060 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6061 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6062 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6063 *DevCamDebug_af_monitor_tof_target_pos;
6064 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6065 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6066 }
6067 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6068 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6069 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6070 *DevCamDebug_af_monitor_tof_confidence;
6071 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6072 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6073 }
6074 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6075 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6076 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6077 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6078 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6079 }
6080 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6081 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6082 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6083 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6084 &fwk_DevCamDebug_af_monitor_type_select, 1);
6085 }
6086 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6087 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6088 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6089 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6090 &fwk_DevCamDebug_af_monitor_refocus, 1);
6091 }
6092 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6093 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6094 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6095 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6096 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6097 }
6098 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6099 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6100 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6101 *DevCamDebug_af_search_pdaf_target_pos;
6102 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6103 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6104 }
6105 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6106 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6107 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6108 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6109 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6110 }
6111 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6112 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6113 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6114 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6115 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6116 }
6117 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6118 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6119 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6120 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6121 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6122 }
6123 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6124 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6125 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6126 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6127 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6128 }
6129 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6130 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6131 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6132 *DevCamDebug_af_search_tof_target_pos;
6133 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6134 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6135 }
6136 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6137 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6138 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6139 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6140 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6141 }
6142 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6143 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6144 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6145 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6146 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6147 }
6148 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6149 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6150 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6151 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6152 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6153 }
6154 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6155 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6156 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6157 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6158 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6159 }
6160 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6161 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6162 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6163 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6164 &fwk_DevCamDebug_af_search_type_select, 1);
6165 }
6166 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6167 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6168 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6169 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6170 &fwk_DevCamDebug_af_search_next_pos, 1);
6171 }
6172 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6173 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6174 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6175 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6176 &fwk_DevCamDebug_af_search_target_pos, 1);
6177 }
6178 // DevCamDebug metadata translateFromHalMetadata AEC
6179 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6180 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6181 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6182 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6183 }
6184 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6185 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6186 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6187 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6188 }
6189 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6190 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6191 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6192 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6193 }
6194 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6195 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6196 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6197 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6198 }
6199 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6200 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6201 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6202 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6203 }
6204 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6205 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6206 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6207 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6208 }
6209 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6210 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6211 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6212 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6213 }
6214 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6215 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6216 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6217 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6218 }
6219 // DevCamDebug metadata translateFromHalMetadata AWB
6220 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6221 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6222 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6223 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6224 }
6225 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6226 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6227 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6228 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6229 }
6230 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6231 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6232 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6233 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6234 }
6235 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6236 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6237 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6238 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6239 }
6240 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6241 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6242 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6243 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6244 }
6245 }
6246 // atrace_end(ATRACE_TAG_ALWAYS);
6247
Thierry Strudel3d639192016-09-09 11:52:26 -07006248 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6249 int64_t fwk_frame_number = *frame_number;
6250 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6251 }
6252
6253 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6254 int32_t fps_range[2];
6255 fps_range[0] = (int32_t)float_range->min_fps;
6256 fps_range[1] = (int32_t)float_range->max_fps;
6257 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6258 fps_range, 2);
6259 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6260 fps_range[0], fps_range[1]);
6261 }
6262
6263 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6264 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6265 }
6266
6267 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6268 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6269 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6270 *sceneMode);
6271 if (NAME_NOT_FOUND != val) {
6272 uint8_t fwkSceneMode = (uint8_t)val;
6273 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6274 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6275 fwkSceneMode);
6276 }
6277 }
6278
6279 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6280 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6281 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6282 }
6283
6284 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6285 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6286 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6287 }
6288
6289 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6290 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6291 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6292 }
6293
6294 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6295 CAM_INTF_META_EDGE_MODE, metadata) {
6296 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6297 }
6298
6299 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6300 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6301 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6302 }
6303
6304 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6305 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6306 }
6307
    // Flash state: negative HAL values are treated as invalid and dropped.
    // When the device advertises no flash unit, the reported state is forced
    // to UNAVAILABLE regardless of what the HAL posted.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }
6317
6318 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6319 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6320 if (NAME_NOT_FOUND != val) {
6321 uint8_t fwk_flashMode = (uint8_t)val;
6322 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6323 }
6324 }
6325
6326 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6327 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6328 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6329 }
6330
6331 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6332 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6333 }
6334
6335 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6336 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6337 }
6338
6339 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6340 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6341 }
6342
6343 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6344 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6345 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6346 }
6347
    // Video stabilization result. The trailing else pairs with the if
    // generated by the IF_META_AVAILABLE macro: when the HAL posts no EIS
    // entry at all, a default is still published (see comment below).
    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("EIS result default to OFF mode");
    }
6359
6360 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6361 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6362 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6363 }
6364
6365 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6366 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6367 }
6368
Thierry Strudel3d639192016-09-09 11:52:26 -07006369 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6370 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006371 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006372
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006373 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6374 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006375
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006376 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006377 blackLevelAppliedPattern->cam_black_level[0],
6378 blackLevelAppliedPattern->cam_black_level[1],
6379 blackLevelAppliedPattern->cam_black_level[2],
6380 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006381 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6382 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006383
6384#ifndef USE_HAL_3_3
6385 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006386 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6387 // depth space.
6388 fwk_blackLevelInd[0] /= 4.0;
6389 fwk_blackLevelInd[1] /= 4.0;
6390 fwk_blackLevelInd[2] /= 4.0;
6391 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006392 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6393 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006394#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 }
6396
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006397#ifndef USE_HAL_3_3
6398 // Fixed whitelevel is used by ISP/Sensor
6399 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6400 &gCamCapability[mCameraId]->white_level, 1);
6401#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006402
6403 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6404 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6405 int32_t scalerCropRegion[4];
6406 scalerCropRegion[0] = hScalerCropRegion->left;
6407 scalerCropRegion[1] = hScalerCropRegion->top;
6408 scalerCropRegion[2] = hScalerCropRegion->width;
6409 scalerCropRegion[3] = hScalerCropRegion->height;
6410
6411 // Adjust crop region from sensor output coordinate system to active
6412 // array coordinate system.
6413 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6414 scalerCropRegion[2], scalerCropRegion[3]);
6415
6416 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6417 }
6418
6419 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6420 LOGD("sensorExpTime = %lld", *sensorExpTime);
6421 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6422 }
6423
6424 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6425 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6426 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6427 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6428 }
6429
6430 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6431 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6432 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6433 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6434 sensorRollingShutterSkew, 1);
6435 }
6436
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        // Derive the noise-model (S, O) pair from the reported sensitivity
        // and repeat it once per color channel, interleaved as
        // [S, O, S, O, ...] in ANDROID_SENSOR_NOISE_PROFILE order.
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // NOTE(review): runtime-sized array is a GCC/Clang VLA extension,
        // not standard C++ — fine for this build, but consider a fixed-size
        // array if num_color_channels has a compile-time upper bound.
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i] = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }
6454
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006455#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006456 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006457 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006458 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006459 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006460 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6461 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6462 }
6463 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006464#endif
6465
Thierry Strudel3d639192016-09-09 11:52:26 -07006466 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6467 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6468 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6469 }
6470
6471 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6472 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6473 *faceDetectMode);
6474 if (NAME_NOT_FOUND != val) {
6475 uint8_t fwk_faceDetectMode = (uint8_t)val;
6476 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6477
6478 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6479 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6480 CAM_INTF_META_FACE_DETECTION, metadata) {
6481 uint8_t numFaces = MIN(
6482 faceDetectionInfo->num_faces_detected, MAX_ROI);
6483 int32_t faceIds[MAX_ROI];
6484 uint8_t faceScores[MAX_ROI];
6485 int32_t faceRectangles[MAX_ROI * 4];
6486 int32_t faceLandmarks[MAX_ROI * 6];
6487 size_t j = 0, k = 0;
6488
6489 for (size_t i = 0; i < numFaces; i++) {
6490 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6491 // Adjust crop region from sensor output coordinate system to active
6492 // array coordinate system.
6493 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6494 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6495 rect.width, rect.height);
6496
6497 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6498 faceRectangles+j, -1);
6499
6500 j+= 4;
6501 }
6502 if (numFaces <= 0) {
6503 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6504 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6505 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6506 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6507 }
6508
6509 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6510 numFaces);
6511 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6512 faceRectangles, numFaces * 4U);
6513 if (fwk_faceDetectMode ==
6514 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6515 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6516 CAM_INTF_META_FACE_LANDMARK, metadata) {
6517
6518 for (size_t i = 0; i < numFaces; i++) {
6519 // Map the co-ordinate sensor output coordinate system to active
6520 // array coordinate system.
6521 mCropRegionMapper.toActiveArray(
6522 landmarks->face_landmarks[i].left_eye_center.x,
6523 landmarks->face_landmarks[i].left_eye_center.y);
6524 mCropRegionMapper.toActiveArray(
6525 landmarks->face_landmarks[i].right_eye_center.x,
6526 landmarks->face_landmarks[i].right_eye_center.y);
6527 mCropRegionMapper.toActiveArray(
6528 landmarks->face_landmarks[i].mouth_center.x,
6529 landmarks->face_landmarks[i].mouth_center.y);
6530
6531 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006532 k+= TOTAL_LANDMARK_INDICES;
6533 }
6534 } else {
6535 for (size_t i = 0; i < numFaces; i++) {
6536 setInvalidLandmarks(faceLandmarks+k);
6537 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006538 }
6539 }
6540
6541 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6542 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6543 faceLandmarks, numFaces * 6U);
6544 }
6545 }
6546 }
6547 }
6548 }
6549
6550 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6551 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6552 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006553
6554 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6555 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6556 // process histogram statistics info
6557 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6558 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6559 cam_histogram_data_t rHistData, gHistData, bHistData;
6560 memset(&rHistData, 0, sizeof(rHistData));
6561 memset(&gHistData, 0, sizeof(gHistData));
6562 memset(&bHistData, 0, sizeof(bHistData));
6563
6564 switch (stats_data->type) {
6565 case CAM_HISTOGRAM_TYPE_BAYER:
6566 switch (stats_data->bayer_stats.data_type) {
6567 case CAM_STATS_CHANNEL_GR:
6568 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6569 break;
6570 case CAM_STATS_CHANNEL_GB:
6571 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6572 break;
6573 case CAM_STATS_CHANNEL_B:
6574 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6575 break;
6576 case CAM_STATS_CHANNEL_ALL:
6577 rHistData = stats_data->bayer_stats.r_stats;
6578 //Framework expects only 3 channels. So, for now,
6579 //use gb stats for G channel.
6580 gHistData = stats_data->bayer_stats.gb_stats;
6581 bHistData = stats_data->bayer_stats.b_stats;
6582 break;
6583 case CAM_STATS_CHANNEL_Y:
6584 case CAM_STATS_CHANNEL_R:
6585 default:
6586 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6587 break;
6588 }
6589 break;
6590 case CAM_HISTOGRAM_TYPE_YUV:
6591 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6592 break;
6593 }
6594
6595 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6596 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6597 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6598
6599 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6600 }
6601 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006602 }
6603
6604 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6605 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6606 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6607 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6608 }
6609
6610 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6611 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6612 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6613 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6614 }
6615
6616 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6617 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6618 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6619 CAM_MAX_SHADING_MAP_HEIGHT);
6620 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6621 CAM_MAX_SHADING_MAP_WIDTH);
6622 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6623 lensShadingMap->lens_shading, 4U * map_width * map_height);
6624 }
6625
6626 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6627 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6628 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6629 }
6630
6631 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6632 //Populate CAM_INTF_META_TONEMAP_CURVES
6633 /* ch0 = G, ch 1 = B, ch 2 = R*/
6634 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6635 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6636 tonemap->tonemap_points_cnt,
6637 CAM_MAX_TONEMAP_CURVE_SIZE);
6638 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6639 }
6640
6641 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6642 &tonemap->curves[0].tonemap_points[0][0],
6643 tonemap->tonemap_points_cnt * 2);
6644
6645 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6646 &tonemap->curves[1].tonemap_points[0][0],
6647 tonemap->tonemap_points_cnt * 2);
6648
6649 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6650 &tonemap->curves[2].tonemap_points[0][0],
6651 tonemap->tonemap_points_cnt * 2);
6652 }
6653
6654 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6655 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6656 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6657 CC_GAIN_MAX);
6658 }
6659
6660 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6661 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6662 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6663 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6664 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6665 }
6666
6667 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6668 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6669 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6670 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6671 toneCurve->tonemap_points_cnt,
6672 CAM_MAX_TONEMAP_CURVE_SIZE);
6673 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6674 }
6675 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6676 (float*)toneCurve->curve.tonemap_points,
6677 toneCurve->tonemap_points_cnt * 2);
6678 }
6679
6680 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6681 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6682 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6683 predColorCorrectionGains->gains, 4);
6684 }
6685
6686 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6687 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6688 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6689 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6690 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6691 }
6692
6693 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6694 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6695 }
6696
6697 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6698 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6699 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6700 }
6701
6702 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6703 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6704 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6705 }
6706
6707 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6708 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6709 *effectMode);
6710 if (NAME_NOT_FOUND != val) {
6711 uint8_t fwk_effectMode = (uint8_t)val;
6712 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6713 }
6714 }
6715
6716 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6717 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6718 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6719 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6720 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6721 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6722 }
6723 int32_t fwk_testPatternData[4];
6724 fwk_testPatternData[0] = testPatternData->r;
6725 fwk_testPatternData[3] = testPatternData->b;
6726 switch (gCamCapability[mCameraId]->color_arrangement) {
6727 case CAM_FILTER_ARRANGEMENT_RGGB:
6728 case CAM_FILTER_ARRANGEMENT_GRBG:
6729 fwk_testPatternData[1] = testPatternData->gr;
6730 fwk_testPatternData[2] = testPatternData->gb;
6731 break;
6732 case CAM_FILTER_ARRANGEMENT_GBRG:
6733 case CAM_FILTER_ARRANGEMENT_BGGR:
6734 fwk_testPatternData[2] = testPatternData->gr;
6735 fwk_testPatternData[1] = testPatternData->gb;
6736 break;
6737 default:
6738 LOGE("color arrangement %d is not supported",
6739 gCamCapability[mCameraId]->color_arrangement);
6740 break;
6741 }
6742 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6743 }
6744
6745 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6746 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6747 }
6748
6749 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6750 String8 str((const char *)gps_methods);
6751 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6752 }
6753
6754 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6755 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6756 }
6757
6758 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6759 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6760 }
6761
6762 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6763 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6764 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6765 }
6766
6767 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6768 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6769 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6770 }
6771
6772 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6773 int32_t fwk_thumb_size[2];
6774 fwk_thumb_size[0] = thumb_size->width;
6775 fwk_thumb_size[1] = thumb_size->height;
6776 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6777 }
6778
6779 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6780 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6781 privateData,
6782 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6783 }
6784
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006785 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6786 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6787 meteringMode, 1);
6788 }
6789
    // Serialize the HAL tuning parameters into a flat blob laid out as:
    //   [version][sensor size][vfe size][cpp size][cac size][mod3 size]
    // followed by the four variable-length data sections, then publish it
    // as QCAMERA3_TUNING_META_DATA_BLOB (an int32 array).
    if (metadata->is_tuning_params_valid) {
        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;


        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
                sizeof(uint32_t));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
                sizeof(uint32_t));
        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
                sizeof(uint32_t));
        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // The mod3 section is always empty in this path; its size is forced
        // to 0 before being written into the header.
        metadata->tuning_params.tuning_mod3_data_size = 0;
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
                sizeof(uint32_t));
        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // Each section is clamped to its compile-time maximum before the
        // copy, so a corrupt size field cannot overrun the blob.
        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cac_data_size,
                TUNING_CAC_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
                count);
        data += count;

        // Published length is the number of whole uint32 words actually
        // written (header + clamped sections), not sizeof the whole blob.
        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
                (int32_t *)(void *)tuning_meta_data_blob,
                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    }
6854
6855 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6856 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6857 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6858 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6859 NEUTRAL_COL_POINTS);
6860 }
6861
6862 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6863 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6864 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6865 }
6866
6867 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6868 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6869 // Adjust crop region from sensor output coordinate system to active
6870 // array coordinate system.
6871 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6872 hAeRegions->rect.width, hAeRegions->rect.height);
6873
6874 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6875 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6876 REGIONS_TUPLE_COUNT);
6877 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6878 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6879 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6880 hAeRegions->rect.height);
6881 }
6882
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006883 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6884 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6885 if (NAME_NOT_FOUND != val) {
6886 uint8_t fwkAfMode = (uint8_t)val;
6887 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6888 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6889 } else {
6890 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6891 val);
6892 }
6893 }
6894
Thierry Strudel3d639192016-09-09 11:52:26 -07006895 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6896 uint8_t fwk_afState = (uint8_t) *afState;
6897 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006898 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006899 }
6900
6901 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6902 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6903 }
6904
6905 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6906 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6907 }
6908
6909 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6910 uint8_t fwk_lensState = *lensState;
6911 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6912 }
6913
6914 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6915 /*af regions*/
6916 int32_t afRegions[REGIONS_TUPLE_COUNT];
6917 // Adjust crop region from sensor output coordinate system to active
6918 // array coordinate system.
6919 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6920 hAfRegions->rect.width, hAfRegions->rect.height);
6921
6922 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6923 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6924 REGIONS_TUPLE_COUNT);
6925 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6926 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6927 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6928 hAfRegions->rect.height);
6929 }
6930
    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
        uint32_t ab_mode = *hal_ab_mode;
        // The framework has no frequency-qualified AUTO values; collapse the
        // HAL's 50Hz/60Hz auto modes into plain AUTO before the map lookup.
        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
            ab_mode = CAM_ANTIBANDING_MODE_AUTO;
        }
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                ab_mode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_ab_mode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
        }
    }
6944
6945 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6946 int val = lookupFwkName(SCENE_MODES_MAP,
6947 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6948 if (NAME_NOT_FOUND != val) {
6949 uint8_t fwkBestshotMode = (uint8_t)val;
6950 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6951 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6952 } else {
6953 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6954 }
6955 }
6956
6957 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6958 uint8_t fwk_mode = (uint8_t) *mode;
6959 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6960 }
6961
6962 /* Constant metadata values to be update*/
6963 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6964 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6965
6966 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6967 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6968
6969 int32_t hotPixelMap[2];
6970 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6971
6972 // CDS
6973 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6974 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6975 }
6976
Thierry Strudel04e026f2016-10-10 11:27:36 -07006977 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6978 int32_t fwk_hdr;
6979 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6980 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6981 } else {
6982 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6983 }
6984 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6985 }
6986
6987 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006988 int32_t fwk_ir = (int32_t) *ir;
6989 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006990 }
6991
Thierry Strudel269c81a2016-10-12 12:13:59 -07006992 // AEC SPEED
6993 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6994 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6995 }
6996
6997 // AWB SPEED
6998 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6999 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7000 }
7001
Thierry Strudel3d639192016-09-09 11:52:26 -07007002 // TNR
7003 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7004 uint8_t tnr_enable = tnr->denoise_enable;
7005 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7006
7007 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7008 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7009 }
7010
7011 // Reprocess crop data
7012 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7013 uint8_t cnt = crop_data->num_of_streams;
7014 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7015 // mm-qcamera-daemon only posts crop_data for streams
7016 // not linked to pproc. So no valid crop metadata is not
7017 // necessarily an error case.
7018 LOGD("No valid crop metadata entries");
7019 } else {
7020 uint32_t reproc_stream_id;
7021 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7022 LOGD("No reprocessible stream found, ignore crop data");
7023 } else {
7024 int rc = NO_ERROR;
7025 Vector<int32_t> roi_map;
7026 int32_t *crop = new int32_t[cnt*4];
7027 if (NULL == crop) {
7028 rc = NO_MEMORY;
7029 }
7030 if (NO_ERROR == rc) {
7031 int32_t streams_found = 0;
7032 for (size_t i = 0; i < cnt; i++) {
7033 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7034 if (pprocDone) {
7035 // HAL already does internal reprocessing,
7036 // either via reprocessing before JPEG encoding,
7037 // or offline postprocessing for pproc bypass case.
7038 crop[0] = 0;
7039 crop[1] = 0;
7040 crop[2] = mInputStreamInfo.dim.width;
7041 crop[3] = mInputStreamInfo.dim.height;
7042 } else {
7043 crop[0] = crop_data->crop_info[i].crop.left;
7044 crop[1] = crop_data->crop_info[i].crop.top;
7045 crop[2] = crop_data->crop_info[i].crop.width;
7046 crop[3] = crop_data->crop_info[i].crop.height;
7047 }
7048 roi_map.add(crop_data->crop_info[i].roi_map.left);
7049 roi_map.add(crop_data->crop_info[i].roi_map.top);
7050 roi_map.add(crop_data->crop_info[i].roi_map.width);
7051 roi_map.add(crop_data->crop_info[i].roi_map.height);
7052 streams_found++;
7053 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7054 crop[0], crop[1], crop[2], crop[3]);
7055 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7056 crop_data->crop_info[i].roi_map.left,
7057 crop_data->crop_info[i].roi_map.top,
7058 crop_data->crop_info[i].roi_map.width,
7059 crop_data->crop_info[i].roi_map.height);
7060 break;
7061
7062 }
7063 }
7064 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7065 &streams_found, 1);
7066 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7067 crop, (size_t)(streams_found * 4));
7068 if (roi_map.array()) {
7069 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7070 roi_map.array(), roi_map.size());
7071 }
7072 }
7073 if (crop) {
7074 delete [] crop;
7075 }
7076 }
7077 }
7078 }
7079
7080 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7081 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7082 // so hardcoding the CAC result to OFF mode.
7083 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7084 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7085 } else {
7086 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7087 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7088 *cacMode);
7089 if (NAME_NOT_FOUND != val) {
7090 uint8_t resultCacMode = (uint8_t)val;
7091 // check whether CAC result from CB is equal to Framework set CAC mode
7092 // If not equal then set the CAC mode came in corresponding request
7093 if (fwk_cacMode != resultCacMode) {
7094 resultCacMode = fwk_cacMode;
7095 }
7096 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7097 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7098 } else {
7099 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7100 }
7101 }
7102 }
7103
7104 // Post blob of cam_cds_data through vendor tag.
7105 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7106 uint8_t cnt = cdsInfo->num_of_streams;
7107 cam_cds_data_t cdsDataOverride;
7108 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7109 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7110 cdsDataOverride.num_of_streams = 1;
7111 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7112 uint32_t reproc_stream_id;
7113 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7114 LOGD("No reprocessible stream found, ignore cds data");
7115 } else {
7116 for (size_t i = 0; i < cnt; i++) {
7117 if (cdsInfo->cds_info[i].stream_id ==
7118 reproc_stream_id) {
7119 cdsDataOverride.cds_info[0].cds_enable =
7120 cdsInfo->cds_info[i].cds_enable;
7121 break;
7122 }
7123 }
7124 }
7125 } else {
7126 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7127 }
7128 camMetadata.update(QCAMERA3_CDS_INFO,
7129 (uint8_t *)&cdsDataOverride,
7130 sizeof(cam_cds_data_t));
7131 }
7132
7133 // Ldaf calibration data
7134 if (!mLdafCalibExist) {
7135 IF_META_AVAILABLE(uint32_t, ldafCalib,
7136 CAM_INTF_META_LDAF_EXIF, metadata) {
7137 mLdafCalibExist = true;
7138 mLdafCalib[0] = ldafCalib[0];
7139 mLdafCalib[1] = ldafCalib[1];
7140 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7141 ldafCalib[0], ldafCalib[1]);
7142 }
7143 }
7144
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007145 // Reprocess and DDM debug data through vendor tag
7146 cam_reprocess_info_t repro_info;
7147 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007148 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7149 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007150 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007151 }
7152 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7153 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007154 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007155 }
7156 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7157 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007158 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007159 }
7160 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7161 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007162 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007163 }
7164 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7165 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007166 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007167 }
7168 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007169 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007170 }
7171 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7172 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007173 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007174 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007175 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7176 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7177 }
7178 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7179 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7180 }
7181 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7182 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007183
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007184 // INSTANT AEC MODE
7185 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7186 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7187 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7188 }
7189
Shuzhen Wange763e802016-03-31 10:24:29 -07007190 // AF scene change
7191 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7192 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7193 }
7194
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007195 /* In batch mode, cache the first metadata in the batch */
7196 if (mBatchSize && firstMetadataInBatch) {
7197 mCachedMetadata.clear();
7198 mCachedMetadata = camMetadata;
7199 }
7200
Thierry Strudel3d639192016-09-09 11:52:26 -07007201 resultMetadata = camMetadata.release();
7202 return resultMetadata;
7203}
7204
/*===========================================================================
 * FUNCTION   : saveExifParams
 *
 * DESCRIPTION: caches the 3A/statistics EXIF debug blobs delivered in a
 *              metadata callback into mExifParams.debug_params so they can
 *              be embedded later during JPEG encoding
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Each clause below copies one vendor EXIF debug blob out of the callback
    // metadata into mExifParams.debug_params (when that buffer is allocated)
    // and flags it valid so the JPEG encoder can embed it later.
    // Note: if mExifParams.debug_params is NULL, the blob is silently dropped.

    // Auto-exposure debug data.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // Auto-white-balance debug data.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // Auto-focus debug data.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // Auto-scene-detection debug data.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats buffer debug data.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats debug data.
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug data.
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data.
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
7275
7276/*===========================================================================
7277 * FUNCTION : get3AExifParams
7278 *
7279 * DESCRIPTION:
7280 *
7281 * PARAMETERS : none
7282 *
7283 *
7284 * RETURN : mm_jpeg_exif_params_t
7285 *
7286 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return (by value) the cached EXIF parameters most recently populated
    // from metadata callbacks (see saveExifParams).
    return mExifParams;
}
7291
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: translates the "urgent" (early partial-result) subset of HAL
 *              callback metadata — 3A states, precapture/AF triggers and
 *              AWB/AE modes — into framework result metadata
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    // AWB state: pass through as-is (narrowed to the framework's uint8 enum).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state passthrough.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and its id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL enum to the framework enum via the lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL fields.
    // Priority order matters: red-eye reduction wins over flash mode, which
    // wins over the plain AE on/off mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields carried a usable value; emit no AE mode.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame index count until a bound is reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7413
7414/*===========================================================================
7415 * FUNCTION : dumpMetadataToFile
7416 *
7417 * DESCRIPTION: Dumps tuning metadata to file system
7418 *
7419 * PARAMETERS :
7420 * @meta : tuning metadata
7421 * @dumpFrameCount : current dump frame count
7422 * @enabled : Enable mask
7423 *
7424 *==========================================================================*/
7425void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7426 uint32_t &dumpFrameCount,
7427 bool enabled,
7428 const char *type,
7429 uint32_t frameNumber)
7430{
7431 //Some sanity checks
7432 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7433 LOGE("Tuning sensor data size bigger than expected %d: %d",
7434 meta.tuning_sensor_data_size,
7435 TUNING_SENSOR_DATA_MAX);
7436 return;
7437 }
7438
7439 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7440 LOGE("Tuning VFE data size bigger than expected %d: %d",
7441 meta.tuning_vfe_data_size,
7442 TUNING_VFE_DATA_MAX);
7443 return;
7444 }
7445
7446 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7447 LOGE("Tuning CPP data size bigger than expected %d: %d",
7448 meta.tuning_cpp_data_size,
7449 TUNING_CPP_DATA_MAX);
7450 return;
7451 }
7452
7453 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7454 LOGE("Tuning CAC data size bigger than expected %d: %d",
7455 meta.tuning_cac_data_size,
7456 TUNING_CAC_DATA_MAX);
7457 return;
7458 }
7459 //
7460
7461 if(enabled){
7462 char timeBuf[FILENAME_MAX];
7463 char buf[FILENAME_MAX];
7464 memset(buf, 0, sizeof(buf));
7465 memset(timeBuf, 0, sizeof(timeBuf));
7466 time_t current_time;
7467 struct tm * timeinfo;
7468 time (&current_time);
7469 timeinfo = localtime (&current_time);
7470 if (timeinfo != NULL) {
7471 strftime (timeBuf, sizeof(timeBuf),
7472 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7473 }
7474 String8 filePath(timeBuf);
7475 snprintf(buf,
7476 sizeof(buf),
7477 "%dm_%s_%d.bin",
7478 dumpFrameCount,
7479 type,
7480 frameNumber);
7481 filePath.append(buf);
7482 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7483 if (file_fd >= 0) {
7484 ssize_t written_len = 0;
7485 meta.tuning_data_version = TUNING_DATA_VERSION;
7486 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7487 written_len += write(file_fd, data, sizeof(uint32_t));
7488 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7489 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7490 written_len += write(file_fd, data, sizeof(uint32_t));
7491 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7492 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7493 written_len += write(file_fd, data, sizeof(uint32_t));
7494 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7495 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7496 written_len += write(file_fd, data, sizeof(uint32_t));
7497 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7498 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7499 written_len += write(file_fd, data, sizeof(uint32_t));
7500 meta.tuning_mod3_data_size = 0;
7501 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7502 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7503 written_len += write(file_fd, data, sizeof(uint32_t));
7504 size_t total_size = meta.tuning_sensor_data_size;
7505 data = (void *)((uint8_t *)&meta.data);
7506 written_len += write(file_fd, data, total_size);
7507 total_size = meta.tuning_vfe_data_size;
7508 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7509 written_len += write(file_fd, data, total_size);
7510 total_size = meta.tuning_cpp_data_size;
7511 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7512 written_len += write(file_fd, data, total_size);
7513 total_size = meta.tuning_cac_data_size;
7514 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7515 written_len += write(file_fd, data, total_size);
7516 close(file_fd);
7517 }else {
7518 LOGE("fail to open file for metadata dumping");
7519 }
7520 }
7521}
7522
7523/*===========================================================================
7524 * FUNCTION : cleanAndSortStreamInfo
7525 *
7526 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7527 * and sort them such that raw stream is at the end of the list
7528 * This is a workaround for camera daemon constraint.
7529 *
7530 * PARAMETERS : None
7531 *
7532 *==========================================================================*/
7533void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7534{
7535 List<stream_info_t *> newStreamInfo;
7536
7537 /*clean up invalid streams*/
7538 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7539 it != mStreamInfo.end();) {
7540 if(((*it)->status) == INVALID){
7541 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7542 delete channel;
7543 free(*it);
7544 it = mStreamInfo.erase(it);
7545 } else {
7546 it++;
7547 }
7548 }
7549
7550 // Move preview/video/callback/snapshot streams into newList
7551 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7552 it != mStreamInfo.end();) {
7553 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7554 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7555 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7556 newStreamInfo.push_back(*it);
7557 it = mStreamInfo.erase(it);
7558 } else
7559 it++;
7560 }
7561 // Move raw streams into newList
7562 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7563 it != mStreamInfo.end();) {
7564 newStreamInfo.push_back(*it);
7565 it = mStreamInfo.erase(it);
7566 }
7567
7568 mStreamInfo = newStreamInfo;
7569}
7570
7571/*===========================================================================
7572 * FUNCTION : extractJpegMetadata
7573 *
7574 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7575 * JPEG metadata is cached in HAL, and return as part of capture
7576 * result when metadata is returned from camera daemon.
7577 *
7578 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7579 * @request: capture request
7580 *
7581 *==========================================================================*/
7582void QCamera3HardwareInterface::extractJpegMetadata(
7583 CameraMetadata& jpegMetadata,
7584 const camera3_capture_request_t *request)
7585{
7586 CameraMetadata frame_settings;
7587 frame_settings = request->settings;
7588
7589 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7590 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7591 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7592 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7593
7594 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7595 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7596 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7597 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7598
7599 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7600 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7601 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7602 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7603
7604 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7605 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7606 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7607 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7608
7609 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7610 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7611 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7612 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7613
7614 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7615 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7616 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7617 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7618
7619 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7620 int32_t thumbnail_size[2];
7621 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7622 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7623 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7624 int32_t orientation =
7625 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007626 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007627 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7628 int32_t temp;
7629 temp = thumbnail_size[0];
7630 thumbnail_size[0] = thumbnail_size[1];
7631 thumbnail_size[1] = temp;
7632 }
7633 }
7634 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7635 thumbnail_size,
7636 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7637 }
7638
7639}
7640
7641/*===========================================================================
7642 * FUNCTION : convertToRegions
7643 *
7644 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7645 *
7646 * PARAMETERS :
7647 * @rect : cam_rect_t struct to convert
7648 * @region : int32_t destination array
7649 * @weight : if we are converting from cam_area_t, weight is valid
7650 * else weight = -1
7651 *
7652 *==========================================================================*/
7653void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7654 int32_t *region, int weight)
7655{
7656 region[0] = rect.left;
7657 region[1] = rect.top;
7658 region[2] = rect.left + rect.width;
7659 region[3] = rect.top + rect.height;
7660 if (weight > -1) {
7661 region[4] = weight;
7662 }
7663}
7664
/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a framework 5-element region array
 *              (xmin, ymin, xmax, ymax, weight) from the request settings
 *              into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture request settings holding the region tag
 *   @tag      : metadata tag identifying which region array to read
 *
 *==========================================================================*/
7677void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7678 const camera_metadata_t *settings, uint32_t tag)
7679{
7680 CameraMetadata frame_settings;
7681 frame_settings = settings;
7682 int32_t x_min = frame_settings.find(tag).data.i32[0];
7683 int32_t y_min = frame_settings.find(tag).data.i32[1];
7684 int32_t x_max = frame_settings.find(tag).data.i32[2];
7685 int32_t y_max = frame_settings.find(tag).data.i32[3];
7686 roi.weight = frame_settings.find(tag).data.i32[4];
7687 roi.rect.left = x_min;
7688 roi.rect.top = y_min;
7689 roi.rect.width = x_max - x_min;
7690 roi.rect.height = y_max - y_min;
7691}
7692
7693/*===========================================================================
7694 * FUNCTION : resetIfNeededROI
7695 *
7696 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7697 * crop region
7698 *
7699 * PARAMETERS :
7700 * @roi : cam_area_t struct to resize
7701 * @scalerCropRegion : cam_crop_region_t region to compare against
7702 *
7703 *
7704 *==========================================================================*/
7705bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7706 const cam_crop_region_t* scalerCropRegion)
7707{
7708 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7709 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7710 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7711 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7712
7713 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7714 * without having this check the calculations below to validate if the roi
7715 * is inside scalar crop region will fail resulting in the roi not being
7716 * reset causing algorithm to continue to use stale roi window
7717 */
7718 if (roi->weight == 0) {
7719 return true;
7720 }
7721
7722 if ((roi_x_max < scalerCropRegion->left) ||
7723 // right edge of roi window is left of scalar crop's left edge
7724 (roi_y_max < scalerCropRegion->top) ||
7725 // bottom edge of roi window is above scalar crop's top edge
7726 (roi->rect.left > crop_x_max) ||
7727 // left edge of roi window is beyond(right) of scalar crop's right edge
7728 (roi->rect.top > crop_y_max)){
7729 // top edge of roi windo is above scalar crop's top edge
7730 return false;
7731 }
7732 if (roi->rect.left < scalerCropRegion->left) {
7733 roi->rect.left = scalerCropRegion->left;
7734 }
7735 if (roi->rect.top < scalerCropRegion->top) {
7736 roi->rect.top = scalerCropRegion->top;
7737 }
7738 if (roi_x_max > crop_x_max) {
7739 roi_x_max = crop_x_max;
7740 }
7741 if (roi_y_max > crop_y_max) {
7742 roi_y_max = crop_y_max;
7743 }
7744 roi->rect.width = roi_x_max - roi->rect.left;
7745 roi->rect.height = roi_y_max - roi->rect.top;
7746 return true;
7747}
7748
7749/*===========================================================================
7750 * FUNCTION : convertLandmarks
7751 *
7752 * DESCRIPTION: helper method to extract the landmarks from face detection info
7753 *
7754 * PARAMETERS :
7755 * @landmark_data : input landmark data to be converted
7756 * @landmarks : int32_t destination array
7757 *
7758 *
7759 *==========================================================================*/
7760void QCamera3HardwareInterface::convertLandmarks(
7761 cam_face_landmarks_info_t landmark_data,
7762 int32_t *landmarks)
7763{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007764 if (landmark_data.is_left_eye_valid) {
7765 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7766 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7767 } else {
7768 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7769 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7770 }
7771
7772 if (landmark_data.is_right_eye_valid) {
7773 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7774 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7775 } else {
7776 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7777 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7778 }
7779
7780 if (landmark_data.is_mouth_valid) {
7781 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7782 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7783 } else {
7784 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7785 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7786 }
7787}
7788
7789/*===========================================================================
7790 * FUNCTION : setInvalidLandmarks
7791 *
7792 * DESCRIPTION: helper method to set invalid landmarks
7793 *
7794 * PARAMETERS :
7795 * @landmarks : int32_t destination array
7796 *
7797 *
7798 *==========================================================================*/
7799void QCamera3HardwareInterface::setInvalidLandmarks(
7800 int32_t *landmarks)
7801{
7802 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7803 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7804 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7805 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7806 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7807 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007808}
7809
7810#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007811
7812/*===========================================================================
7813 * FUNCTION : getCapabilities
7814 *
7815 * DESCRIPTION: query camera capability from back-end
7816 *
7817 * PARAMETERS :
7818 * @ops : mm-interface ops structure
7819 * @cam_handle : camera handle for which we need capability
7820 *
7821 * RETURN : ptr type of capability structure
7822 * capability for success
7823 * NULL for failure
7824 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Single-buffer heap used as a bounce buffer: it is mapped to the
    // backend, filled by query_capability(), copied out, and then released.
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the buffer first so stale heap contents never masquerade as
    // backend-reported capabilities.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // Backend fills the mapped buffer in place.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Heap-allocate the caller-owned copy; the caller is responsible for
    // freeing it (see initCapabilities error paths).
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Reset the analysis padding offsets in the local copy.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Cleanup ladder: execution also falls through here on success so the
    // bounce buffer is always unmapped and freed; rc selects the return.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7900
Thierry Strudel3d639192016-09-09 11:52:26 -07007901/*===========================================================================
7902 * FUNCTION : initCapabilities
7903 *
7904 * DESCRIPTION: initialize camera capabilities in static data struct
7905 *
7906 * PARAMETERS :
7907 * @cameraId : camera Id
7908 *
7909 * RETURN : int32_t type of status
7910 * NO_ERROR -- success
7911 * none-zero failure code
7912 *==========================================================================*/
7913int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7914{
7915 int rc = 0;
7916 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007917 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007918
7919 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7920 if (rc) {
7921 LOGE("camera_open failed. rc = %d", rc);
7922 goto open_failed;
7923 }
7924 if (!cameraHandle) {
7925 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7926 goto open_failed;
7927 }
7928
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007929 handle = get_main_camera_handle(cameraHandle->camera_handle);
7930 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7931 if (gCamCapability[cameraId] == NULL) {
7932 rc = FAILED_TRANSACTION;
7933 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007934 }
7935
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007936 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007937 if (is_dual_camera_by_idx(cameraId)) {
7938 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7939 gCamCapability[cameraId]->aux_cam_cap =
7940 getCapabilities(cameraHandle->ops, handle);
7941 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7942 rc = FAILED_TRANSACTION;
7943 free(gCamCapability[cameraId]);
7944 goto failed_op;
7945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007946
7947 // Copy the main camera capability to main_cam_cap struct
7948 gCamCapability[cameraId]->main_cam_cap =
7949 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7950 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7951 LOGE("out of memory");
7952 rc = NO_MEMORY;
7953 goto failed_op;
7954 }
7955 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7956 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007958failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007959 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7960 cameraHandle = NULL;
7961open_failed:
7962 return rc;
7963}
7964
7965/*==========================================================================
 * FUNCTION   : get3AVersion
7967 *
7968 * DESCRIPTION: get the Q3A S/W version
7969 *
7970 * PARAMETERS :
7971 * @sw_version: Reference of Q3A structure which will hold version info upon
7972 * return
7973 *
7974 * RETURN : None
7975 *
7976 *==========================================================================*/
7977void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7978{
7979 if(gCamCapability[mCameraId])
7980 sw_version = gCamCapability[mCameraId]->q3a_version;
7981 else
7982 LOGE("Capability structure NULL!");
7983}
7984
7985
7986/*===========================================================================
7987 * FUNCTION : initParameters
7988 *
7989 * DESCRIPTION: initialize camera parameters
7990 *
7991 * PARAMETERS :
7992 *
7993 * RETURN : int32_t type of status
7994 * NO_ERROR -- success
7995 * none-zero failure code
7996 *==========================================================================*/
7997int QCamera3HardwareInterface::initParameters()
7998{
7999 int rc = 0;
8000
8001 //Allocate Set Param Buffer
8002 mParamHeap = new QCamera3HeapMemory(1);
8003 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8004 if(rc != OK) {
8005 rc = NO_MEMORY;
8006 LOGE("Failed to allocate SETPARM Heap memory");
8007 delete mParamHeap;
8008 mParamHeap = NULL;
8009 return rc;
8010 }
8011
8012 //Map memory for parameters buffer
8013 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8014 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8015 mParamHeap->getFd(0),
8016 sizeof(metadata_buffer_t),
8017 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8018 if(rc < 0) {
8019 LOGE("failed to map SETPARM buffer");
8020 rc = FAILED_TRANSACTION;
8021 mParamHeap->deallocate();
8022 delete mParamHeap;
8023 mParamHeap = NULL;
8024 return rc;
8025 }
8026
8027 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8028
8029 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8030 return rc;
8031}
8032
8033/*===========================================================================
8034 * FUNCTION : deinitParameters
8035 *
8036 * DESCRIPTION: de-initialize camera parameters
8037 *
8038 * PARAMETERS :
8039 *
8040 * RETURN : NONE
8041 *==========================================================================*/
8042void QCamera3HardwareInterface::deinitParameters()
8043{
8044 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8045 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8046
8047 mParamHeap->deallocate();
8048 delete mParamHeap;
8049 mParamHeap = NULL;
8050
8051 mParameters = NULL;
8052
8053 free(mPrevParameters);
8054 mPrevParameters = NULL;
8055}
8056
8057/*===========================================================================
8058 * FUNCTION : calcMaxJpegSize
8059 *
8060 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8061 *
8062 * PARAMETERS :
8063 *
8064 * RETURN : max_jpeg_size
8065 *==========================================================================*/
8066size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8067{
8068 size_t max_jpeg_size = 0;
8069 size_t temp_width, temp_height;
8070 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8071 MAX_SIZES_CNT);
8072 for (size_t i = 0; i < count; i++) {
8073 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8074 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8075 if (temp_width * temp_height > max_jpeg_size ) {
8076 max_jpeg_size = temp_width * temp_height;
8077 }
8078 }
8079 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8080 return max_jpeg_size;
8081}
8082
8083/*===========================================================================
8084 * FUNCTION : getMaxRawSize
8085 *
8086 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8087 *
8088 * PARAMETERS :
8089 *
8090 * RETURN : Largest supported Raw Dimension
8091 *==========================================================================*/
8092cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8093{
8094 int max_width = 0;
8095 cam_dimension_t maxRawSize;
8096
8097 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8098 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8099 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8100 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8101 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8102 }
8103 }
8104 return maxRawSize;
8105}
8106
8107
8108/*===========================================================================
8109 * FUNCTION : calcMaxJpegDim
8110 *
8111 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8112 *
8113 * PARAMETERS :
8114 *
8115 * RETURN : max_jpeg_dim
8116 *==========================================================================*/
8117cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8118{
8119 cam_dimension_t max_jpeg_dim;
8120 cam_dimension_t curr_jpeg_dim;
8121 max_jpeg_dim.width = 0;
8122 max_jpeg_dim.height = 0;
8123 curr_jpeg_dim.width = 0;
8124 curr_jpeg_dim.height = 0;
8125 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8126 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8127 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8128 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8129 max_jpeg_dim.width * max_jpeg_dim.height ) {
8130 max_jpeg_dim.width = curr_jpeg_dim.width;
8131 max_jpeg_dim.height = curr_jpeg_dim.height;
8132 }
8133 }
8134 return max_jpeg_dim;
8135}
8136
8137/*===========================================================================
8138 * FUNCTION : addStreamConfig
8139 *
8140 * DESCRIPTION: adds the stream configuration to the array
8141 *
8142 * PARAMETERS :
8143 * @available_stream_configs : pointer to stream configuration array
8144 * @scalar_format : scalar format
8145 * @dim : configuration dimension
8146 * @config_type : input or output configuration type
8147 *
8148 * RETURN : NONE
8149 *==========================================================================*/
8150void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8151 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8152{
8153 available_stream_configs.add(scalar_format);
8154 available_stream_configs.add(dim.width);
8155 available_stream_configs.add(dim.height);
8156 available_stream_configs.add(config_type);
8157}
8158
8159/*===========================================================================
 * FUNCTION   : supportBurstCapture
8161 *
8162 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8163 *
8164 * PARAMETERS :
8165 * @cameraId : camera Id
8166 *
8167 * RETURN : true if camera supports BURST_CAPTURE
8168 * false otherwise
8169 *==========================================================================*/
8170bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8171{
8172 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8173 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8174 const int32_t highResWidth = 3264;
8175 const int32_t highResHeight = 2448;
8176
8177 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8178 // Maximum resolution images cannot be captured at >= 10fps
8179 // -> not supporting BURST_CAPTURE
8180 return false;
8181 }
8182
8183 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8184 // Maximum resolution images can be captured at >= 20fps
8185 // --> supporting BURST_CAPTURE
8186 return true;
8187 }
8188
8189 // Find the smallest highRes resolution, or largest resolution if there is none
8190 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8191 MAX_SIZES_CNT);
8192 size_t highRes = 0;
8193 while ((highRes + 1 < totalCnt) &&
8194 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8195 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8196 highResWidth * highResHeight)) {
8197 highRes++;
8198 }
8199 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8200 return true;
8201 } else {
8202 return false;
8203 }
8204}
8205
8206/*===========================================================================
8207 * FUNCTION : initStaticMetadata
8208 *
8209 * DESCRIPTION: initialize the static metadata
8210 *
8211 * PARAMETERS :
8212 * @cameraId : camera Id
8213 *
8214 * RETURN : int32_t type of status
8215 * 0 -- success
8216 * non-zero failure code
8217 *==========================================================================*/
8218int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8219{
8220 int rc = 0;
8221 CameraMetadata staticInfo;
8222 size_t count = 0;
8223 bool limitedDevice = false;
8224 char prop[PROPERTY_VALUE_MAX];
8225 bool supportBurst = false;
8226
8227 supportBurst = supportBurstCapture(cameraId);
8228
8229 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8230 * guaranteed or if min fps of max resolution is less than 20 fps, its
8231 * advertised as limited device*/
8232 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8233 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8234 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8235 !supportBurst;
8236
8237 uint8_t supportedHwLvl = limitedDevice ?
8238 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008239#ifndef USE_HAL_3_3
8240 // LEVEL_3 - This device will support level 3.
8241 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8242#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008243 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008244#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008245
8246 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8247 &supportedHwLvl, 1);
8248
8249 bool facingBack = false;
8250 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8251 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8252 facingBack = true;
8253 }
8254 /*HAL 3 only*/
8255 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8256 &gCamCapability[cameraId]->min_focus_distance, 1);
8257
8258 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8259 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8260
8261 /*should be using focal lengths but sensor doesn't provide that info now*/
8262 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8263 &gCamCapability[cameraId]->focal_length,
8264 1);
8265
8266 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8267 gCamCapability[cameraId]->apertures,
8268 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8269
8270 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8271 gCamCapability[cameraId]->filter_densities,
8272 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8273
8274
8275 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8276 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8277 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8278
8279 int32_t lens_shading_map_size[] = {
8280 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8281 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8282 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8283 lens_shading_map_size,
8284 sizeof(lens_shading_map_size)/sizeof(int32_t));
8285
8286 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8287 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8288
8289 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8290 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8291
8292 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8293 &gCamCapability[cameraId]->max_frame_duration, 1);
8294
8295 camera_metadata_rational baseGainFactor = {
8296 gCamCapability[cameraId]->base_gain_factor.numerator,
8297 gCamCapability[cameraId]->base_gain_factor.denominator};
8298 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8299 &baseGainFactor, 1);
8300
8301 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8302 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8303
8304 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8305 gCamCapability[cameraId]->pixel_array_size.height};
8306 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8307 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8308
8309 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8310 gCamCapability[cameraId]->active_array_size.top,
8311 gCamCapability[cameraId]->active_array_size.width,
8312 gCamCapability[cameraId]->active_array_size.height};
8313 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8314 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8315
8316 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8317 &gCamCapability[cameraId]->white_level, 1);
8318
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008319 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8320 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8321 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008322 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008323 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008324
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008325#ifndef USE_HAL_3_3
8326 bool hasBlackRegions = false;
8327 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8328 LOGW("black_region_count: %d is bounded to %d",
8329 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8330 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8331 }
8332 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8333 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8334 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8335 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8336 }
8337 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8338 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8339 hasBlackRegions = true;
8340 }
8341#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008342 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8343 &gCamCapability[cameraId]->flash_charge_duration, 1);
8344
8345 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8346 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8347
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008348 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8349 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8350 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008351 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8352 &timestampSource, 1);
8353
8354 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8355 &gCamCapability[cameraId]->histogram_size, 1);
8356
8357 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8358 &gCamCapability[cameraId]->max_histogram_count, 1);
8359
8360 int32_t sharpness_map_size[] = {
8361 gCamCapability[cameraId]->sharpness_map_size.width,
8362 gCamCapability[cameraId]->sharpness_map_size.height};
8363
8364 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8365 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8366
8367 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8368 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8369
8370 int32_t scalar_formats[] = {
8371 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8372 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8373 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8374 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8375 HAL_PIXEL_FORMAT_RAW10,
8376 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8377 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8378 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8379 scalar_formats,
8380 scalar_formats_count);
8381
8382 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8383 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8384 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8385 count, MAX_SIZES_CNT, available_processed_sizes);
8386 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8387 available_processed_sizes, count * 2);
8388
8389 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8390 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8391 makeTable(gCamCapability[cameraId]->raw_dim,
8392 count, MAX_SIZES_CNT, available_raw_sizes);
8393 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8394 available_raw_sizes, count * 2);
8395
8396 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8397 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8398 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8399 count, MAX_SIZES_CNT, available_fps_ranges);
8400 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8401 available_fps_ranges, count * 2);
8402
8403 camera_metadata_rational exposureCompensationStep = {
8404 gCamCapability[cameraId]->exp_compensation_step.numerator,
8405 gCamCapability[cameraId]->exp_compensation_step.denominator};
8406 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8407 &exposureCompensationStep, 1);
8408
8409 Vector<uint8_t> availableVstabModes;
8410 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8411 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008412 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008413 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008414 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008415 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008416 count = IS_TYPE_MAX;
8417 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8418 for (size_t i = 0; i < count; i++) {
8419 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8420 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8421 eisSupported = true;
8422 break;
8423 }
8424 }
8425 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008426 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8427 }
8428 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8429 availableVstabModes.array(), availableVstabModes.size());
8430
8431 /*HAL 1 and HAL 3 common*/
8432 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8433 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8434 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8435 float maxZoom = maxZoomStep/minZoomStep;
8436 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8437 &maxZoom, 1);
8438
8439 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8440 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8441
8442 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8443 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8444 max3aRegions[2] = 0; /* AF not supported */
8445 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8446 max3aRegions, 3);
8447
8448 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8449 memset(prop, 0, sizeof(prop));
8450 property_get("persist.camera.facedetect", prop, "1");
8451 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8452 LOGD("Support face detection mode: %d",
8453 supportedFaceDetectMode);
8454
8455 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008456 /* support mode should be OFF if max number of face is 0 */
8457 if (maxFaces <= 0) {
8458 supportedFaceDetectMode = 0;
8459 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008460 Vector<uint8_t> availableFaceDetectModes;
8461 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8462 if (supportedFaceDetectMode == 1) {
8463 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8464 } else if (supportedFaceDetectMode == 2) {
8465 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8466 } else if (supportedFaceDetectMode == 3) {
8467 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8468 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8469 } else {
8470 maxFaces = 0;
8471 }
8472 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8473 availableFaceDetectModes.array(),
8474 availableFaceDetectModes.size());
8475 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8476 (int32_t *)&maxFaces, 1);
8477
8478 int32_t exposureCompensationRange[] = {
8479 gCamCapability[cameraId]->exposure_compensation_min,
8480 gCamCapability[cameraId]->exposure_compensation_max};
8481 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8482 exposureCompensationRange,
8483 sizeof(exposureCompensationRange)/sizeof(int32_t));
8484
8485 uint8_t lensFacing = (facingBack) ?
8486 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8487 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8488
8489 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8490 available_thumbnail_sizes,
8491 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8492
8493 /*all sizes will be clubbed into this tag*/
8494 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8495 /*android.scaler.availableStreamConfigurations*/
8496 Vector<int32_t> available_stream_configs;
8497 cam_dimension_t active_array_dim;
8498 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8499 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8500 /* Add input/output stream configurations for each scalar formats*/
8501 for (size_t j = 0; j < scalar_formats_count; j++) {
8502 switch (scalar_formats[j]) {
8503 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8504 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8505 case HAL_PIXEL_FORMAT_RAW10:
8506 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8507 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8508 addStreamConfig(available_stream_configs, scalar_formats[j],
8509 gCamCapability[cameraId]->raw_dim[i],
8510 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8511 }
8512 break;
8513 case HAL_PIXEL_FORMAT_BLOB:
8514 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8515 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8516 addStreamConfig(available_stream_configs, scalar_formats[j],
8517 gCamCapability[cameraId]->picture_sizes_tbl[i],
8518 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8519 }
8520 break;
8521 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8522 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8523 default:
8524 cam_dimension_t largest_picture_size;
8525 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8526 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8527 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8528 addStreamConfig(available_stream_configs, scalar_formats[j],
8529 gCamCapability[cameraId]->picture_sizes_tbl[i],
8530 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8531 /* Book keep largest */
8532 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8533 >= largest_picture_size.width &&
8534 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8535 >= largest_picture_size.height)
8536 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8537 }
8538 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8539 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8540 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8541 addStreamConfig(available_stream_configs, scalar_formats[j],
8542 largest_picture_size,
8543 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8544 }
8545 break;
8546 }
8547 }
8548
8549 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8550 available_stream_configs.array(), available_stream_configs.size());
8551 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8552 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8553
8554 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8555 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8556
8557 /* android.scaler.availableMinFrameDurations */
8558 Vector<int64_t> available_min_durations;
8559 for (size_t j = 0; j < scalar_formats_count; j++) {
8560 switch (scalar_formats[j]) {
8561 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8562 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8563 case HAL_PIXEL_FORMAT_RAW10:
8564 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8565 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8566 available_min_durations.add(scalar_formats[j]);
8567 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8568 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8569 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8570 }
8571 break;
8572 default:
8573 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8574 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8575 available_min_durations.add(scalar_formats[j]);
8576 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8577 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8578 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8579 }
8580 break;
8581 }
8582 }
8583 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8584 available_min_durations.array(), available_min_durations.size());
8585
8586 Vector<int32_t> available_hfr_configs;
8587 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8588 int32_t fps = 0;
8589 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8590 case CAM_HFR_MODE_60FPS:
8591 fps = 60;
8592 break;
8593 case CAM_HFR_MODE_90FPS:
8594 fps = 90;
8595 break;
8596 case CAM_HFR_MODE_120FPS:
8597 fps = 120;
8598 break;
8599 case CAM_HFR_MODE_150FPS:
8600 fps = 150;
8601 break;
8602 case CAM_HFR_MODE_180FPS:
8603 fps = 180;
8604 break;
8605 case CAM_HFR_MODE_210FPS:
8606 fps = 210;
8607 break;
8608 case CAM_HFR_MODE_240FPS:
8609 fps = 240;
8610 break;
8611 case CAM_HFR_MODE_480FPS:
8612 fps = 480;
8613 break;
8614 case CAM_HFR_MODE_OFF:
8615 case CAM_HFR_MODE_MAX:
8616 default:
8617 break;
8618 }
8619
8620 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8621 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8622 /* For each HFR frame rate, need to advertise one variable fps range
8623 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8624 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8625 * set by the app. When video recording is started, [120, 120] is
8626 * set. This way sensor configuration does not change when recording
8627 * is started */
8628
8629 /* (width, height, fps_min, fps_max, batch_size_max) */
8630 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8631 j < MAX_SIZES_CNT; j++) {
8632 available_hfr_configs.add(
8633 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8634 available_hfr_configs.add(
8635 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8636 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8637 available_hfr_configs.add(fps);
8638 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8639
8640 /* (width, height, fps_min, fps_max, batch_size_max) */
8641 available_hfr_configs.add(
8642 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8643 available_hfr_configs.add(
8644 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8645 available_hfr_configs.add(fps);
8646 available_hfr_configs.add(fps);
8647 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8648 }
8649 }
8650 }
8651 //Advertise HFR capability only if the property is set
8652 memset(prop, 0, sizeof(prop));
8653 property_get("persist.camera.hal3hfr.enable", prop, "1");
8654 uint8_t hfrEnable = (uint8_t)atoi(prop);
8655
8656 if(hfrEnable && available_hfr_configs.array()) {
8657 staticInfo.update(
8658 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8659 available_hfr_configs.array(), available_hfr_configs.size());
8660 }
8661
8662 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8663 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8664 &max_jpeg_size, 1);
8665
8666 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8667 size_t size = 0;
8668 count = CAM_EFFECT_MODE_MAX;
8669 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8670 for (size_t i = 0; i < count; i++) {
8671 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8672 gCamCapability[cameraId]->supported_effects[i]);
8673 if (NAME_NOT_FOUND != val) {
8674 avail_effects[size] = (uint8_t)val;
8675 size++;
8676 }
8677 }
8678 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8679 avail_effects,
8680 size);
8681
8682 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8683 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8684 size_t supported_scene_modes_cnt = 0;
8685 count = CAM_SCENE_MODE_MAX;
8686 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8687 for (size_t i = 0; i < count; i++) {
8688 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8689 CAM_SCENE_MODE_OFF) {
8690 int val = lookupFwkName(SCENE_MODES_MAP,
8691 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8692 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08008693
Thierry Strudel3d639192016-09-09 11:52:26 -07008694 if (NAME_NOT_FOUND != val) {
8695 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8696 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8697 supported_scene_modes_cnt++;
8698 }
8699 }
8700 }
8701 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8702 avail_scene_modes,
8703 supported_scene_modes_cnt);
8704
8705 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8706 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8707 supported_scene_modes_cnt,
8708 CAM_SCENE_MODE_MAX,
8709 scene_mode_overrides,
8710 supported_indexes,
8711 cameraId);
8712
8713 if (supported_scene_modes_cnt == 0) {
8714 supported_scene_modes_cnt = 1;
8715 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8716 }
8717
8718 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8719 scene_mode_overrides, supported_scene_modes_cnt * 3);
8720
8721 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8722 ANDROID_CONTROL_MODE_AUTO,
8723 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8724 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8725 available_control_modes,
8726 3);
8727
8728 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8729 size = 0;
8730 count = CAM_ANTIBANDING_MODE_MAX;
8731 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8732 for (size_t i = 0; i < count; i++) {
8733 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8734 gCamCapability[cameraId]->supported_antibandings[i]);
8735 if (NAME_NOT_FOUND != val) {
8736 avail_antibanding_modes[size] = (uint8_t)val;
8737 size++;
8738 }
8739
8740 }
8741 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8742 avail_antibanding_modes,
8743 size);
8744
8745 uint8_t avail_abberation_modes[] = {
8746 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8747 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8748 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8749 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8750 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8751 if (0 == count) {
8752 // If no aberration correction modes are available for a device, this advertise OFF mode
8753 size = 1;
8754 } else {
8755 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8756 // So, advertize all 3 modes if atleast any one mode is supported as per the
8757 // new M requirement
8758 size = 3;
8759 }
8760 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8761 avail_abberation_modes,
8762 size);
8763
8764 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8765 size = 0;
8766 count = CAM_FOCUS_MODE_MAX;
8767 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8768 for (size_t i = 0; i < count; i++) {
8769 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8770 gCamCapability[cameraId]->supported_focus_modes[i]);
8771 if (NAME_NOT_FOUND != val) {
8772 avail_af_modes[size] = (uint8_t)val;
8773 size++;
8774 }
8775 }
8776 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8777 avail_af_modes,
8778 size);
8779
8780 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8781 size = 0;
8782 count = CAM_WB_MODE_MAX;
8783 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8784 for (size_t i = 0; i < count; i++) {
8785 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8786 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8787 gCamCapability[cameraId]->supported_white_balances[i]);
8788 if (NAME_NOT_FOUND != val) {
8789 avail_awb_modes[size] = (uint8_t)val;
8790 size++;
8791 }
8792 }
8793 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8794 avail_awb_modes,
8795 size);
8796
8797 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8798 count = CAM_FLASH_FIRING_LEVEL_MAX;
8799 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8800 count);
8801 for (size_t i = 0; i < count; i++) {
8802 available_flash_levels[i] =
8803 gCamCapability[cameraId]->supported_firing_levels[i];
8804 }
8805 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8806 available_flash_levels, count);
8807
8808 uint8_t flashAvailable;
8809 if (gCamCapability[cameraId]->flash_available)
8810 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8811 else
8812 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8813 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8814 &flashAvailable, 1);
8815
8816 Vector<uint8_t> avail_ae_modes;
8817 count = CAM_AE_MODE_MAX;
8818 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8819 for (size_t i = 0; i < count; i++) {
8820 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8821 }
8822 if (flashAvailable) {
8823 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8824 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8825 }
8826 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8827 avail_ae_modes.array(),
8828 avail_ae_modes.size());
8829
8830 int32_t sensitivity_range[2];
8831 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8832 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8833 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8834 sensitivity_range,
8835 sizeof(sensitivity_range) / sizeof(int32_t));
8836
8837 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8838 &gCamCapability[cameraId]->max_analog_sensitivity,
8839 1);
8840
8841 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8842 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8843 &sensor_orientation,
8844 1);
8845
8846 int32_t max_output_streams[] = {
8847 MAX_STALLING_STREAMS,
8848 MAX_PROCESSED_STREAMS,
8849 MAX_RAW_STREAMS};
8850 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8851 max_output_streams,
8852 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8853
8854 uint8_t avail_leds = 0;
8855 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8856 &avail_leds, 0);
8857
8858 uint8_t focus_dist_calibrated;
8859 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8860 gCamCapability[cameraId]->focus_dist_calibrated);
8861 if (NAME_NOT_FOUND != val) {
8862 focus_dist_calibrated = (uint8_t)val;
8863 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8864 &focus_dist_calibrated, 1);
8865 }
8866
8867 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8868 size = 0;
8869 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8870 MAX_TEST_PATTERN_CNT);
8871 for (size_t i = 0; i < count; i++) {
8872 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8873 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8874 if (NAME_NOT_FOUND != testpatternMode) {
8875 avail_testpattern_modes[size] = testpatternMode;
8876 size++;
8877 }
8878 }
8879 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8880 avail_testpattern_modes,
8881 size);
8882
8883 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8884 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8885 &max_pipeline_depth,
8886 1);
8887
8888 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8889 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8890 &partial_result_count,
8891 1);
8892
8893 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8894 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8895
8896 Vector<uint8_t> available_capabilities;
8897 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8898 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8899 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8900 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8901 if (supportBurst) {
8902 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8903 }
8904 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8905 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8906 if (hfrEnable && available_hfr_configs.array()) {
8907 available_capabilities.add(
8908 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8909 }
8910
8911 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8912 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8913 }
8914 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8915 available_capabilities.array(),
8916 available_capabilities.size());
8917
8918 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8919 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8920 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8921 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8922
8923 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8924 &aeLockAvailable, 1);
8925
8926 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8927 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8928 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8929 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8930
8931 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8932 &awbLockAvailable, 1);
8933
8934 int32_t max_input_streams = 1;
8935 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8936 &max_input_streams,
8937 1);
8938
8939 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8940 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8941 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8942 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8943 HAL_PIXEL_FORMAT_YCbCr_420_888};
8944 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8945 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8946
8947 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8948 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8949 &max_latency,
8950 1);
8951
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008952#ifndef USE_HAL_3_3
8953 int32_t isp_sensitivity_range[2];
8954 isp_sensitivity_range[0] =
8955 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8956 isp_sensitivity_range[1] =
8957 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8958 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8959 isp_sensitivity_range,
8960 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8961#endif
8962
Thierry Strudel3d639192016-09-09 11:52:26 -07008963 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8964 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8965 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8966 available_hot_pixel_modes,
8967 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8968
8969 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8970 ANDROID_SHADING_MODE_FAST,
8971 ANDROID_SHADING_MODE_HIGH_QUALITY};
8972 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8973 available_shading_modes,
8974 3);
8975
8976 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8977 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8978 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8979 available_lens_shading_map_modes,
8980 2);
8981
8982 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8983 ANDROID_EDGE_MODE_FAST,
8984 ANDROID_EDGE_MODE_HIGH_QUALITY,
8985 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8986 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8987 available_edge_modes,
8988 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8989
8990 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8991 ANDROID_NOISE_REDUCTION_MODE_FAST,
8992 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8993 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8994 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8995 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8996 available_noise_red_modes,
8997 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8998
8999 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9000 ANDROID_TONEMAP_MODE_FAST,
9001 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9002 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9003 available_tonemap_modes,
9004 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9005
9006 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9007 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9008 available_hot_pixel_map_modes,
9009 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9010
9011 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9012 gCamCapability[cameraId]->reference_illuminant1);
9013 if (NAME_NOT_FOUND != val) {
9014 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9015 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9016 }
9017
9018 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9019 gCamCapability[cameraId]->reference_illuminant2);
9020 if (NAME_NOT_FOUND != val) {
9021 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9022 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9023 }
9024
9025 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9026 (void *)gCamCapability[cameraId]->forward_matrix1,
9027 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9028
9029 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9030 (void *)gCamCapability[cameraId]->forward_matrix2,
9031 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9032
9033 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9034 (void *)gCamCapability[cameraId]->color_transform1,
9035 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9036
9037 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9038 (void *)gCamCapability[cameraId]->color_transform2,
9039 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9040
9041 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9042 (void *)gCamCapability[cameraId]->calibration_transform1,
9043 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9044
9045 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9046 (void *)gCamCapability[cameraId]->calibration_transform2,
9047 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9048
9049 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9050 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9051 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9052 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9053 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9054 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9055 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9056 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9057 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9058 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9059 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9060 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9061 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9062 ANDROID_JPEG_GPS_COORDINATES,
9063 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9064 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9065 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9066 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9067 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9068 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9069 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9070 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9071 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9072 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009073#ifndef USE_HAL_3_3
9074 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9075#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009076 ANDROID_STATISTICS_FACE_DETECT_MODE,
9077 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9078 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9079 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009080 ANDROID_BLACK_LEVEL_LOCK,
9081 /* DevCamDebug metadata request_keys_basic */
9082 DEVCAMDEBUG_META_ENABLE,
9083 /* DevCamDebug metadata end */
9084 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009085
9086 size_t request_keys_cnt =
9087 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9088 Vector<int32_t> available_request_keys;
9089 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9090 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9091 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9092 }
9093
9094 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9095 available_request_keys.array(), available_request_keys.size());
9096
9097 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9098 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9099 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9100 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9101 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9102 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9103 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9104 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9105 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9106 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9107 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9108 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9109 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9110 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9111 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9112 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9113 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9114 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
9115 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9116 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9117 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009118 ANDROID_STATISTICS_FACE_SCORES,
9119#ifndef USE_HAL_3_3
9120 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9121#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07009122 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009123 // DevCamDebug metadata result_keys_basic
9124 DEVCAMDEBUG_META_ENABLE,
9125 // DevCamDebug metadata result_keys AF
9126 DEVCAMDEBUG_AF_LENS_POSITION,
9127 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9128 DEVCAMDEBUG_AF_TOF_DISTANCE,
9129 DEVCAMDEBUG_AF_LUMA,
9130 DEVCAMDEBUG_AF_HAF_STATE,
9131 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9132 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9133 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9134 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9135 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9136 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9137 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9138 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9139 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9140 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9141 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9142 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9143 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9144 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9145 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9146 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9147 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9148 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9149 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9150 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9151 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9152 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9153 // DevCamDebug metadata result_keys AEC
9154 DEVCAMDEBUG_AEC_TARGET_LUMA,
9155 DEVCAMDEBUG_AEC_COMP_LUMA,
9156 DEVCAMDEBUG_AEC_AVG_LUMA,
9157 DEVCAMDEBUG_AEC_CUR_LUMA,
9158 DEVCAMDEBUG_AEC_LINECOUNT,
9159 DEVCAMDEBUG_AEC_REAL_GAIN,
9160 DEVCAMDEBUG_AEC_EXP_INDEX,
9161 DEVCAMDEBUG_AEC_LUX_IDX,
9162 // DevCamDebug metadata result_keys AWB
9163 DEVCAMDEBUG_AWB_R_GAIN,
9164 DEVCAMDEBUG_AWB_G_GAIN,
9165 DEVCAMDEBUG_AWB_B_GAIN,
9166 DEVCAMDEBUG_AWB_CCT,
9167 DEVCAMDEBUG_AWB_DECISION,
9168 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009169 };
9170
Thierry Strudel3d639192016-09-09 11:52:26 -07009171 size_t result_keys_cnt =
9172 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9173
9174 Vector<int32_t> available_result_keys;
9175 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9176 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9177 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9178 }
9179 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9180 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9181 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9182 }
9183 if (supportedFaceDetectMode == 1) {
9184 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9185 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9186 } else if ((supportedFaceDetectMode == 2) ||
9187 (supportedFaceDetectMode == 3)) {
9188 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9189 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9190 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009191#ifndef USE_HAL_3_3
9192 if (hasBlackRegions) {
9193 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9194 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9195 }
9196#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009197 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9198 available_result_keys.array(), available_result_keys.size());
9199
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009200 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009201 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9202 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9203 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9204 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9205 ANDROID_SCALER_CROPPING_TYPE,
9206 ANDROID_SYNC_MAX_LATENCY,
9207 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9208 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9209 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9210 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9211 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9212 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9213 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9214 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9215 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9216 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9217 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9218 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9219 ANDROID_LENS_FACING,
9220 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9221 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9222 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9223 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9224 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9225 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9226 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9227 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9228 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9229 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9230 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9231 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9232 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9233 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9234 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9235 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9236 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9237 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9238 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9239 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9240 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
9241 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
9242 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9243 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9244 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9245 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9246 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9247 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9248 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9249 ANDROID_CONTROL_AVAILABLE_MODES,
9250 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9251 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9252 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9253 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009254 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9255#ifndef USE_HAL_3_3
9256 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9257 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9258#endif
9259 };
9260
9261 Vector<int32_t> available_characteristics_keys;
9262 available_characteristics_keys.appendArray(characteristics_keys_basic,
9263 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9264#ifndef USE_HAL_3_3
9265 if (hasBlackRegions) {
9266 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9267 }
9268#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009270 available_characteristics_keys.array(),
9271 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009272
9273 /*available stall durations depend on the hw + sw and will be different for different devices */
9274 /*have to add for raw after implementation*/
9275 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9276 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9277
9278 Vector<int64_t> available_stall_durations;
9279 for (uint32_t j = 0; j < stall_formats_count; j++) {
9280 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9281 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9282 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9283 available_stall_durations.add(stall_formats[j]);
9284 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9285 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9286 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9287 }
9288 } else {
9289 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9290 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9291 available_stall_durations.add(stall_formats[j]);
9292 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9293 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9294 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9295 }
9296 }
9297 }
9298 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9299 available_stall_durations.array(),
9300 available_stall_durations.size());
9301
9302 //QCAMERA3_OPAQUE_RAW
9303 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9304 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9305 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9306 case LEGACY_RAW:
9307 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9308 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9309 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9310 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9311 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9312 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9313 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9314 break;
9315 case MIPI_RAW:
9316 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9317 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9318 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9319 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9320 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9321 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9322 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9323 break;
9324 default:
9325 LOGE("unknown opaque_raw_format %d",
9326 gCamCapability[cameraId]->opaque_raw_fmt);
9327 break;
9328 }
9329 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9330
9331 Vector<int32_t> strides;
9332 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9333 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9334 cam_stream_buf_plane_info_t buf_planes;
9335 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9336 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9337 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9338 &gCamCapability[cameraId]->padding_info, &buf_planes);
9339 strides.add(buf_planes.plane_info.mp[0].stride);
9340 }
9341 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9342 strides.size());
9343
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009344 //TBD: remove the following line once backend advertises zzHDR in feature mask
9345 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009346 //Video HDR default
9347 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9348 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009349 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009350 int32_t vhdr_mode[] = {
9351 QCAMERA3_VIDEO_HDR_MODE_OFF,
9352 QCAMERA3_VIDEO_HDR_MODE_ON};
9353
9354 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9355 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9356 vhdr_mode, vhdr_mode_count);
9357 }
9358
Thierry Strudel3d639192016-09-09 11:52:26 -07009359 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9360 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9361 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9362
9363 uint8_t isMonoOnly =
9364 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9365 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9366 &isMonoOnly, 1);
9367
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009368#ifndef USE_HAL_3_3
9369 Vector<int32_t> opaque_size;
9370 for (size_t j = 0; j < scalar_formats_count; j++) {
9371 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9372 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9373 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9374 cam_stream_buf_plane_info_t buf_planes;
9375
9376 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9377 &gCamCapability[cameraId]->padding_info, &buf_planes);
9378
9379 if (rc == 0) {
9380 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9381 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9382 opaque_size.add(buf_planes.plane_info.frame_len);
9383 }else {
9384 LOGE("raw frame calculation failed!");
9385 }
9386 }
9387 }
9388 }
9389
9390 if ((opaque_size.size() > 0) &&
9391 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9392 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9393 else
9394 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9395#endif
9396
Thierry Strudel04e026f2016-10-10 11:27:36 -07009397 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9398 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9399 size = 0;
9400 count = CAM_IR_MODE_MAX;
9401 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9402 for (size_t i = 0; i < count; i++) {
9403 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9404 gCamCapability[cameraId]->supported_ir_modes[i]);
9405 if (NAME_NOT_FOUND != val) {
9406 avail_ir_modes[size] = (int32_t)val;
9407 size++;
9408 }
9409 }
9410 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9411 avail_ir_modes, size);
9412 }
9413
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009414 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9415 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9416 size = 0;
9417 count = CAM_AEC_CONVERGENCE_MAX;
9418 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9419 for (size_t i = 0; i < count; i++) {
9420 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9421 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9422 if (NAME_NOT_FOUND != val) {
9423 available_instant_aec_modes[size] = (int32_t)val;
9424 size++;
9425 }
9426 }
9427 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9428 available_instant_aec_modes, size);
9429 }
9430
Thierry Strudel3d639192016-09-09 11:52:26 -07009431 gStaticMetadata[cameraId] = staticInfo.release();
9432 return rc;
9433}
9434
9435/*===========================================================================
9436 * FUNCTION : makeTable
9437 *
9438 * DESCRIPTION: make a table of sizes
9439 *
9440 * PARAMETERS :
9441 *
9442 *
9443 *==========================================================================*/
9444void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9445 size_t max_size, int32_t *sizeTable)
9446{
9447 size_t j = 0;
9448 if (size > max_size) {
9449 size = max_size;
9450 }
9451 for (size_t i = 0; i < size; i++) {
9452 sizeTable[j] = dimTable[i].width;
9453 sizeTable[j+1] = dimTable[i].height;
9454 j+=2;
9455 }
9456}
9457
9458/*===========================================================================
9459 * FUNCTION : makeFPSTable
9460 *
9461 * DESCRIPTION: make a table of fps ranges
9462 *
9463 * PARAMETERS :
9464 *
9465 *==========================================================================*/
9466void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9467 size_t max_size, int32_t *fpsRangesTable)
9468{
9469 size_t j = 0;
9470 if (size > max_size) {
9471 size = max_size;
9472 }
9473 for (size_t i = 0; i < size; i++) {
9474 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9475 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9476 j+=2;
9477 }
9478}
9479
9480/*===========================================================================
9481 * FUNCTION : makeOverridesList
9482 *
9483 * DESCRIPTION: make a list of scene mode overrides
9484 *
9485 * PARAMETERS :
9486 *
9487 *
9488 *==========================================================================*/
9489void QCamera3HardwareInterface::makeOverridesList(
9490 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9491 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9492{
9493 /*daemon will give a list of overrides for all scene modes.
9494 However we should send the fwk only the overrides for the scene modes
9495 supported by the framework*/
9496 size_t j = 0;
9497 if (size > max_size) {
9498 size = max_size;
9499 }
9500 size_t focus_count = CAM_FOCUS_MODE_MAX;
9501 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9502 focus_count);
9503 for (size_t i = 0; i < size; i++) {
9504 bool supt = false;
9505 size_t index = supported_indexes[i];
9506 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9507 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9508 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9509 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9510 overridesTable[index].awb_mode);
9511 if (NAME_NOT_FOUND != val) {
9512 overridesList[j+1] = (uint8_t)val;
9513 }
9514 uint8_t focus_override = overridesTable[index].af_mode;
9515 for (size_t k = 0; k < focus_count; k++) {
9516 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9517 supt = true;
9518 break;
9519 }
9520 }
9521 if (supt) {
9522 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9523 focus_override);
9524 if (NAME_NOT_FOUND != val) {
9525 overridesList[j+2] = (uint8_t)val;
9526 }
9527 } else {
9528 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9529 }
9530 j+=3;
9531 }
9532}
9533
9534/*===========================================================================
9535 * FUNCTION : filterJpegSizes
9536 *
9537 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9538 * could be downscaled to
9539 *
9540 * PARAMETERS :
9541 *
9542 * RETURN : length of jpegSizes array
9543 *==========================================================================*/
9544
9545size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9546 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9547 uint8_t downscale_factor)
9548{
9549 if (0 == downscale_factor) {
9550 downscale_factor = 1;
9551 }
9552
9553 int32_t min_width = active_array_size.width / downscale_factor;
9554 int32_t min_height = active_array_size.height / downscale_factor;
9555 size_t jpegSizesCnt = 0;
9556 if (processedSizesCnt > maxCount) {
9557 processedSizesCnt = maxCount;
9558 }
9559 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9560 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9561 jpegSizes[jpegSizesCnt] = processedSizes[i];
9562 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9563 jpegSizesCnt += 2;
9564 }
9565 }
9566 return jpegSizesCnt;
9567}
9568
9569/*===========================================================================
9570 * FUNCTION : computeNoiseModelEntryS
9571 *
9572 * DESCRIPTION: function to map a given sensitivity to the S noise
9573 * model parameters in the DNG noise model.
9574 *
9575 * PARAMETERS : sens : the sensor sensitivity
9576 *
 9577 * RETURN : S (sensor amplification) noise
9578 *
9579 *==========================================================================*/
9580double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9581 double s = gCamCapability[mCameraId]->gradient_S * sens +
9582 gCamCapability[mCameraId]->offset_S;
9583 return ((s < 0.0) ? 0.0 : s);
9584}
9585
9586/*===========================================================================
9587 * FUNCTION : computeNoiseModelEntryO
9588 *
9589 * DESCRIPTION: function to map a given sensitivity to the O noise
9590 * model parameters in the DNG noise model.
9591 *
9592 * PARAMETERS : sens : the sensor sensitivity
9593 *
 9594 * RETURN : O (sensor readout) noise
9595 *
9596 *==========================================================================*/
9597double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9598 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9599 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9600 1.0 : (1.0 * sens / max_analog_sens);
9601 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9602 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9603 return ((o < 0.0) ? 0.0 : o);
9604}
9605
9606/*===========================================================================
9607 * FUNCTION : getSensorSensitivity
9608 *
9609 * DESCRIPTION: convert iso_mode to an integer value
9610 *
9611 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9612 *
 9613 * RETURN : sensitivity supported by sensor
9614 *
9615 *==========================================================================*/
9616int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9617{
9618 int32_t sensitivity;
9619
9620 switch (iso_mode) {
9621 case CAM_ISO_MODE_100:
9622 sensitivity = 100;
9623 break;
9624 case CAM_ISO_MODE_200:
9625 sensitivity = 200;
9626 break;
9627 case CAM_ISO_MODE_400:
9628 sensitivity = 400;
9629 break;
9630 case CAM_ISO_MODE_800:
9631 sensitivity = 800;
9632 break;
9633 case CAM_ISO_MODE_1600:
9634 sensitivity = 1600;
9635 break;
9636 default:
9637 sensitivity = -1;
9638 break;
9639 }
9640 return sensitivity;
9641}
9642
9643/*===========================================================================
9644 * FUNCTION : getCamInfo
9645 *
9646 * DESCRIPTION: query camera capabilities
9647 *
9648 * PARAMETERS :
9649 * @cameraId : camera Id
9650 * @info : camera info struct to be filled in with camera capabilities
9651 *
9652 * RETURN : int type of status
9653 * NO_ERROR -- success
 9654 * non-zero failure code
9655 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches shared across all HAL instances.
    pthread_mutex_lock(&gCamLock);
    // Query capabilities from the backend only on first access.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Build the static characteristics metadata only on first access.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the backend sensor position (including aux sensors) onto the
    // two facings the framework understands.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report failure but still fill in the rest of
        // the info structure below before returning rc.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // Advertised device API version depends on the HAL build flavor.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest max_fps across all advertised fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput relative to the CPP's maximum bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9730
9731/*===========================================================================
9732 * FUNCTION : translateCapabilityToMetadata
9733 *
9734 * DESCRIPTION: translate the capability into camera_metadata_t
9735 *
9736 * PARAMETERS : type of the request
9737 *
9738 *
9739 * RETURN : success: camera_metadata_t*
9740 * failure: NULL
9741 *
9742 *==========================================================================*/
9743camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9744{
9745 if (mDefaultMetadata[type] != NULL) {
9746 return mDefaultMetadata[type];
9747 }
9748 //first time we are handling this request
9749 //fill up the metadata structure using the wrapper class
9750 CameraMetadata settings;
9751 //translate from cam_capability_t to camera_metadata_tag_t
9752 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9753 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9754 int32_t defaultRequestID = 0;
9755 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9756
9757 /* OIS disable */
9758 char ois_prop[PROPERTY_VALUE_MAX];
9759 memset(ois_prop, 0, sizeof(ois_prop));
9760 property_get("persist.camera.ois.disable", ois_prop, "0");
9761 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9762
9763 /* Force video to use OIS */
9764 char videoOisProp[PROPERTY_VALUE_MAX];
9765 memset(videoOisProp, 0, sizeof(videoOisProp));
9766 property_get("persist.camera.ois.video", videoOisProp, "1");
9767 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009768
9769 // Hybrid AE enable/disable
9770 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9771 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9772 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9773 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9774
Thierry Strudel3d639192016-09-09 11:52:26 -07009775 uint8_t controlIntent = 0;
9776 uint8_t focusMode;
9777 uint8_t vsMode;
9778 uint8_t optStabMode;
9779 uint8_t cacMode;
9780 uint8_t edge_mode;
9781 uint8_t noise_red_mode;
9782 uint8_t tonemap_mode;
9783 bool highQualityModeEntryAvailable = FALSE;
9784 bool fastModeEntryAvailable = FALSE;
9785 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9786 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009787
9788 char videoHdrProp[PROPERTY_VALUE_MAX];
9789 memset(videoHdrProp, 0, sizeof(videoHdrProp));
9790 property_get("persist.camera.hdr.video", videoHdrProp, "0");
9791 uint8_t hdr_mode = (uint8_t)atoi(videoHdrProp);
9792
Thierry Strudel3d639192016-09-09 11:52:26 -07009793 switch (type) {
9794 case CAMERA3_TEMPLATE_PREVIEW:
9795 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9796 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9797 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9798 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9799 edge_mode = ANDROID_EDGE_MODE_FAST;
9800 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9801 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9802 break;
9803 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9804 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9805 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9806 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9807 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9808 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9809 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9810 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9811 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9812 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9813 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9814 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9815 highQualityModeEntryAvailable = TRUE;
9816 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9817 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9818 fastModeEntryAvailable = TRUE;
9819 }
9820 }
9821 if (highQualityModeEntryAvailable) {
9822 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9823 } else if (fastModeEntryAvailable) {
9824 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9825 }
9826 break;
9827 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9828 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9829 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9830 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009831 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9832 edge_mode = ANDROID_EDGE_MODE_FAST;
9833 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9834 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9835 if (forceVideoOis)
9836 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009837 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009838 break;
9839 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9840 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9841 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9842 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009843 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9844 edge_mode = ANDROID_EDGE_MODE_FAST;
9845 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9846 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9847 if (forceVideoOis)
9848 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009849 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009850 break;
9851 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9852 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9853 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9854 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9855 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9856 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9857 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9858 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9859 break;
9860 case CAMERA3_TEMPLATE_MANUAL:
9861 edge_mode = ANDROID_EDGE_MODE_FAST;
9862 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9863 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9864 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9865 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9866 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9867 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9868 break;
9869 default:
9870 edge_mode = ANDROID_EDGE_MODE_FAST;
9871 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9872 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9873 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9874 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9875 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9876 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9877 break;
9878 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009879 // Set CAC to OFF if underlying device doesn't support
9880 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9881 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9882 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009883 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9884 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9885 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9886 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9887 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9888 }
9889 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9890
9891 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9892 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9893 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9894 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9895 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9896 || ois_disable)
9897 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9898 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9899
9900 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9901 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9902
9903 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9904 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9905
9906 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9907 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9908
9909 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9910 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9911
9912 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9913 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9914
9915 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9916 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9917
9918 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9919 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9920
9921 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9922 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9923
9924 /*flash*/
9925 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9926 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9927
9928 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9929 settings.update(ANDROID_FLASH_FIRING_POWER,
9930 &flashFiringLevel, 1);
9931
9932 /* lens */
9933 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9934 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9935
9936 if (gCamCapability[mCameraId]->filter_densities_count) {
9937 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9938 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9939 gCamCapability[mCameraId]->filter_densities_count);
9940 }
9941
9942 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9943 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9944
9945 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9946 float default_focus_distance = 0;
9947 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9948 }
9949
9950 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9951 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9952
9953 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9954 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9955
9956 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9957 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9958
9959 /* face detection (default to OFF) */
9960 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9961 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9962
9963 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9964 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9965
9966 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9967 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9968
9969 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9970 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9971
9972 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9973 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9974
9975 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9976 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9977
9978 /* Exposure time(Update the Min Exposure Time)*/
9979 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9980 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9981
9982 /* frame duration */
9983 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9984 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9985
9986 /* sensitivity */
9987 static const int32_t default_sensitivity = 100;
9988 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009989#ifndef USE_HAL_3_3
9990 static const int32_t default_isp_sensitivity =
9991 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9992 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9993#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009994
9995 /*edge mode*/
9996 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9997
9998 /*noise reduction mode*/
9999 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10000
10001 /*color correction mode*/
10002 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10003 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10004
10005 /*transform matrix mode*/
10006 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10007
10008 int32_t scaler_crop_region[4];
10009 scaler_crop_region[0] = 0;
10010 scaler_crop_region[1] = 0;
10011 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10012 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10013 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10014
10015 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10016 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10017
10018 /*focus distance*/
10019 float focus_distance = 0.0;
10020 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10021
10022 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010023 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010024 float max_range = 0.0;
10025 float max_fixed_fps = 0.0;
10026 int32_t fps_range[2] = {0, 0};
10027 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10028 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010029 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10030 TEMPLATE_MAX_PREVIEW_FPS) {
10031 continue;
10032 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010033 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10034 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10035 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10036 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10037 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10038 if (range > max_range) {
10039 fps_range[0] =
10040 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10041 fps_range[1] =
10042 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10043 max_range = range;
10044 }
10045 } else {
10046 if (range < 0.01 && max_fixed_fps <
10047 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10048 fps_range[0] =
10049 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10050 fps_range[1] =
10051 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10052 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10053 }
10054 }
10055 }
10056 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10057
10058 /*precapture trigger*/
10059 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10060 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10061
10062 /*af trigger*/
10063 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10064 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10065
10066 /* ae & af regions */
10067 int32_t active_region[] = {
10068 gCamCapability[mCameraId]->active_array_size.left,
10069 gCamCapability[mCameraId]->active_array_size.top,
10070 gCamCapability[mCameraId]->active_array_size.left +
10071 gCamCapability[mCameraId]->active_array_size.width,
10072 gCamCapability[mCameraId]->active_array_size.top +
10073 gCamCapability[mCameraId]->active_array_size.height,
10074 0};
10075 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10076 sizeof(active_region) / sizeof(active_region[0]));
10077 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10078 sizeof(active_region) / sizeof(active_region[0]));
10079
10080 /* black level lock */
10081 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10082 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10083
10084 /* lens shading map mode */
10085 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10086 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10087 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10088 }
10089 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10090
10091 //special defaults for manual template
10092 if (type == CAMERA3_TEMPLATE_MANUAL) {
10093 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10094 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10095
10096 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10097 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10098
10099 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10100 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10101
10102 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10103 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10104
10105 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10106 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10107
10108 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10109 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10110 }
10111
10112
    /* TNR
     * We use this location to determine for which templates TNR will be set.
     * TNR is enabled if either the preview or the video stream requires TNR.
     * This is not to be confused with linking on a per-stream basis; that
     * decision is still made per session and is handled as part of stream
     * configuration.
     */
10119 uint8_t tnr_enable = 0;
10120
10121 if (m_bTnrPreview || m_bTnrVideo) {
10122
10123 switch (type) {
10124 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10125 tnr_enable = 1;
10126 break;
10127
10128 default:
10129 tnr_enable = 0;
10130 break;
10131 }
10132
10133 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10134 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10135 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10136
10137 LOGD("TNR:%d with process plate %d for template:%d",
10138 tnr_enable, tnr_process_type, type);
10139 }
10140
10141 //Update Link tags to default
10142 int32_t sync_type = CAM_TYPE_STANDALONE;
10143 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10144
10145 int32_t is_main = 0; //this doesn't matter as app should overwrite
10146 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10147
10148 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10149
10150 /* CDS default */
10151 char prop[PROPERTY_VALUE_MAX];
10152 memset(prop, 0, sizeof(prop));
10153 property_get("persist.camera.CDS", prop, "Auto");
10154 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10155 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10156 if (CAM_CDS_MODE_MAX == cds_mode) {
10157 cds_mode = CAM_CDS_MODE_AUTO;
10158 }
10159
10160 /* Disabling CDS in templates which have TNR enabled*/
10161 if (tnr_enable)
10162 cds_mode = CAM_CDS_MODE_OFF;
10163
10164 int32_t mode = cds_mode;
10165 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010166
Thierry Strudel04e026f2016-10-10 11:27:36 -070010167 /* IR Mode Default Off */
10168 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
10169 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
10170
Thierry Strudel269c81a2016-10-12 12:13:59 -070010171 /* Manual Convergence AEC Speed is disabled by default*/
10172 float default_aec_speed = 0;
10173 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10174
10175 /* Manual Convergence AWB Speed is disabled by default*/
10176 float default_awb_speed = 0;
10177 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10178
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010179 // Set instant AEC to normal convergence by default
10180 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10181 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10182
Shuzhen Wang19463d72016-03-08 11:09:52 -080010183 /* hybrid ae */
10184 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10185
Thierry Strudel3d639192016-09-09 11:52:26 -070010186 mDefaultMetadata[type] = settings.release();
10187
10188 return mDefaultMetadata[type];
10189}
10190
10191/*===========================================================================
10192 * FUNCTION : setFrameParameters
10193 *
10194 * DESCRIPTION: set parameters per frame as requested in the metadata from
10195 * framework
10196 *
10197 * PARAMETERS :
10198 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010199 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010200 * @blob_request: Whether this request is a blob request or not
10201 *
10202 * RETURN : success: NO_ERROR
10203 * failure:
10204 *==========================================================================*/
10205int QCamera3HardwareInterface::setFrameParameters(
10206 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010207 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010208 int blob_request,
10209 uint32_t snapshotStreamId)
10210{
10211 /*translate from camera_metadata_t type to parm_type_t*/
10212 int rc = 0;
10213 int32_t hal_version = CAM_HAL_V3;
10214
10215 clear_metadata_buffer(mParameters);
10216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10217 LOGE("Failed to set hal version in the parameters");
10218 return BAD_VALUE;
10219 }
10220
10221 /*we need to update the frame number in the parameters*/
10222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10223 request->frame_number)) {
10224 LOGE("Failed to set the frame number in the parameters");
10225 return BAD_VALUE;
10226 }
10227
10228 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010230 LOGE("Failed to set stream type mask in the parameters");
10231 return BAD_VALUE;
10232 }
10233
10234 if (mUpdateDebugLevel) {
10235 uint32_t dummyDebugLevel = 0;
10236 /* The value of dummyDebugLevel is irrelavent. On
10237 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
10238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10239 dummyDebugLevel)) {
10240 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10241 return BAD_VALUE;
10242 }
10243 mUpdateDebugLevel = false;
10244 }
10245
10246 if(request->settings != NULL){
10247 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10248 if (blob_request)
10249 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10250 }
10251
10252 return rc;
10253}
10254
10255/*===========================================================================
10256 * FUNCTION : setReprocParameters
10257 *
10258 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10259 * return it.
10260 *
10261 * PARAMETERS :
10262 * @request : request that needs to be serviced
10263 *
10264 * RETURN : success: NO_ERROR
10265 * failure:
10266 *==========================================================================*/
10267int32_t QCamera3HardwareInterface::setReprocParameters(
10268 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10269 uint32_t snapshotStreamId)
10270{
10271 /*translate from camera_metadata_t type to parm_type_t*/
10272 int rc = 0;
10273
10274 if (NULL == request->settings){
10275 LOGE("Reprocess settings cannot be NULL");
10276 return BAD_VALUE;
10277 }
10278
10279 if (NULL == reprocParam) {
10280 LOGE("Invalid reprocessing metadata buffer");
10281 return BAD_VALUE;
10282 }
10283 clear_metadata_buffer(reprocParam);
10284
10285 /*we need to update the frame number in the parameters*/
10286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10287 request->frame_number)) {
10288 LOGE("Failed to set the frame number in the parameters");
10289 return BAD_VALUE;
10290 }
10291
10292 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10293 if (rc < 0) {
10294 LOGE("Failed to translate reproc request");
10295 return rc;
10296 }
10297
10298 CameraMetadata frame_settings;
10299 frame_settings = request->settings;
10300 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10301 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10302 int32_t *crop_count =
10303 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10304 int32_t *crop_data =
10305 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10306 int32_t *roi_map =
10307 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10308 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10309 cam_crop_data_t crop_meta;
10310 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10311 crop_meta.num_of_streams = 1;
10312 crop_meta.crop_info[0].crop.left = crop_data[0];
10313 crop_meta.crop_info[0].crop.top = crop_data[1];
10314 crop_meta.crop_info[0].crop.width = crop_data[2];
10315 crop_meta.crop_info[0].crop.height = crop_data[3];
10316
10317 crop_meta.crop_info[0].roi_map.left =
10318 roi_map[0];
10319 crop_meta.crop_info[0].roi_map.top =
10320 roi_map[1];
10321 crop_meta.crop_info[0].roi_map.width =
10322 roi_map[2];
10323 crop_meta.crop_info[0].roi_map.height =
10324 roi_map[3];
10325
10326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10327 rc = BAD_VALUE;
10328 }
10329 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10330 request->input_buffer->stream,
10331 crop_meta.crop_info[0].crop.left,
10332 crop_meta.crop_info[0].crop.top,
10333 crop_meta.crop_info[0].crop.width,
10334 crop_meta.crop_info[0].crop.height);
10335 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10336 request->input_buffer->stream,
10337 crop_meta.crop_info[0].roi_map.left,
10338 crop_meta.crop_info[0].roi_map.top,
10339 crop_meta.crop_info[0].roi_map.width,
10340 crop_meta.crop_info[0].roi_map.height);
10341 } else {
10342 LOGE("Invalid reprocess crop count %d!", *crop_count);
10343 }
10344 } else {
10345 LOGE("No crop data from matching output stream");
10346 }
10347
10348 /* These settings are not needed for regular requests so handle them specially for
10349 reprocess requests; information needed for EXIF tags */
10350 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10351 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10352 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10353 if (NAME_NOT_FOUND != val) {
10354 uint32_t flashMode = (uint32_t)val;
10355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10356 rc = BAD_VALUE;
10357 }
10358 } else {
10359 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10360 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10361 }
10362 } else {
10363 LOGH("No flash mode in reprocess settings");
10364 }
10365
10366 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10367 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10369 rc = BAD_VALUE;
10370 }
10371 } else {
10372 LOGH("No flash state in reprocess settings");
10373 }
10374
10375 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10376 uint8_t *reprocessFlags =
10377 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10379 *reprocessFlags)) {
10380 rc = BAD_VALUE;
10381 }
10382 }
10383
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010384 // Add metadata which reprocess needs
10385 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10386 cam_reprocess_info_t *repro_info =
10387 (cam_reprocess_info_t *)frame_settings.find
10388 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010389 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010390 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010391 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010392 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010393 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010394 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010395 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010396 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010397 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010398 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010399 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010400 repro_info->pipeline_flip);
10401 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10402 repro_info->af_roi);
10403 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10404 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010405 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10406 CAM_INTF_PARM_ROTATION metadata then has been added in
10407 translateToHalMetadata. HAL need to keep this new rotation
10408 metadata. Otherwise, the old rotation info saved in the vendor tag
10409 would be used */
10410 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10411 CAM_INTF_PARM_ROTATION, reprocParam) {
10412 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10413 } else {
10414 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010415 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010416 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010417 }
10418
10419 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10420 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10421 roi.width and roi.height would be the final JPEG size.
10422 For now, HAL only checks this for reprocess request */
10423 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10424 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10425 uint8_t *enable =
10426 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10427 if (*enable == TRUE) {
10428 int32_t *crop_data =
10429 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10430 cam_stream_crop_info_t crop_meta;
10431 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10432 crop_meta.stream_id = 0;
10433 crop_meta.crop.left = crop_data[0];
10434 crop_meta.crop.top = crop_data[1];
10435 crop_meta.crop.width = crop_data[2];
10436 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010437 // The JPEG crop roi should match cpp output size
10438 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10439 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10440 crop_meta.roi_map.left = 0;
10441 crop_meta.roi_map.top = 0;
10442 crop_meta.roi_map.width = cpp_crop->crop.width;
10443 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010444 }
10445 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10446 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010447 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010448 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010449 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10450 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010451 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010452 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10453
10454 // Add JPEG scale information
10455 cam_dimension_t scale_dim;
10456 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10457 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10458 int32_t *roi =
10459 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10460 scale_dim.width = roi[2];
10461 scale_dim.height = roi[3];
10462 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10463 scale_dim);
10464 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10465 scale_dim.width, scale_dim.height, mCameraId);
10466 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010467 }
10468 }
10469
10470 return rc;
10471}
10472
10473/*===========================================================================
10474 * FUNCTION : saveRequestSettings
10475 *
10476 * DESCRIPTION: Add any settings that might have changed to the request settings
10477 * and save the settings to be applied on the frame
10478 *
10479 * PARAMETERS :
10480 * @jpegMetadata : the extracted and/or modified jpeg metadata
10481 * @request : request with initial settings
10482 *
10483 * RETURN :
10484 * camera_metadata_t* : pointer to the saved request settings
10485 *==========================================================================*/
10486camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10487 const CameraMetadata &jpegMetadata,
10488 camera3_capture_request_t *request)
10489{
10490 camera_metadata_t *resultMetadata;
10491 CameraMetadata camMetadata;
10492 camMetadata = request->settings;
10493
10494 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10495 int32_t thumbnail_size[2];
10496 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10497 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10498 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10499 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10500 }
10501
10502 if (request->input_buffer != NULL) {
10503 uint8_t reprocessFlags = 1;
10504 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10505 (uint8_t*)&reprocessFlags,
10506 sizeof(reprocessFlags));
10507 }
10508
10509 resultMetadata = camMetadata.release();
10510 return resultMetadata;
10511}
10512
10513/*===========================================================================
10514 * FUNCTION : setHalFpsRange
10515 *
10516 * DESCRIPTION: set FPS range parameter
10517 *
10518 *
10519 * PARAMETERS :
10520 * @settings : Metadata from framework
10521 * @hal_metadata: Metadata buffer
10522 *
10523 *
10524 * RETURN : success: NO_ERROR
10525 * failure:
10526 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): ANDROID_CONTROL_AE_TARGET_FPS_RANGE is read with find()
    // without an exists() check -- presumably the caller verifies the tag is
    // present before calling; confirm against call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with video fps identical to the AE target range; the HFR branch
    // below overrides the minimums for constrained high-speed sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batch size is recomputed from scratch on every call; it stays 0 for
    // non-HFR sessions and for HFR rates below MIN_FPS_FOR_BATCH_MODE.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Pin the minimum fps to the maximum so the sensor runs at a fixed
        // high-speed rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto one of the discrete HFR modes.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Preview runs at sensor_fps / batch size; the batch size is
                // capped so it never exceeds what the ISP supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally publish the (possibly adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
10620
10621/*===========================================================================
10622 * FUNCTION : translateToHalMetadata
10623 *
10624 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10625 *
10626 *
10627 * PARAMETERS :
10628 * @request : request sent from framework
10629 *
10630 *
10631 * RETURN : success: NO_ERROR
10632 * failure:
10633 *==========================================================================*/
10634int QCamera3HardwareInterface::translateToHalMetadata
10635 (const camera3_capture_request_t *request,
10636 metadata_buffer_t *hal_metadata,
10637 uint32_t snapshotStreamId)
10638{
10639 int rc = 0;
10640 CameraMetadata frame_settings;
10641 frame_settings = request->settings;
10642
    /* Do not change the order of the following list unless you know what you
     * are doing.
     * The order is laid out in such a way that parameters in the front of the
     * table may be used to override the parameters later in the table.
     * Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
10652 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10653 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10655 rc = BAD_VALUE;
10656 }
10657 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10658 if (rc != NO_ERROR) {
10659 LOGE("extractSceneMode failed");
10660 }
10661 }
10662
10663 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10664 uint8_t fwk_aeMode =
10665 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10666 uint8_t aeMode;
10667 int32_t redeye;
10668
10669 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10670 aeMode = CAM_AE_MODE_OFF;
10671 } else {
10672 aeMode = CAM_AE_MODE_ON;
10673 }
10674 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10675 redeye = 1;
10676 } else {
10677 redeye = 0;
10678 }
10679
10680 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10681 fwk_aeMode);
10682 if (NAME_NOT_FOUND != val) {
10683 int32_t flashMode = (int32_t)val;
10684 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10685 }
10686
10687 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10689 rc = BAD_VALUE;
10690 }
10691 }
10692
10693 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10694 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10695 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10696 fwk_whiteLevel);
10697 if (NAME_NOT_FOUND != val) {
10698 uint8_t whiteLevel = (uint8_t)val;
10699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10700 rc = BAD_VALUE;
10701 }
10702 }
10703 }
10704
10705 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10706 uint8_t fwk_cacMode =
10707 frame_settings.find(
10708 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10709 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10710 fwk_cacMode);
10711 if (NAME_NOT_FOUND != val) {
10712 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10713 bool entryAvailable = FALSE;
10714 // Check whether Frameworks set CAC mode is supported in device or not
10715 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10716 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10717 entryAvailable = TRUE;
10718 break;
10719 }
10720 }
10721 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10722 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10723 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10724 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10725 if (entryAvailable == FALSE) {
10726 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10727 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10728 } else {
10729 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10730 // High is not supported and so set the FAST as spec say's underlying
10731 // device implementation can be the same for both modes.
10732 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10733 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10734 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10735 // in order to avoid the fps drop due to high quality
10736 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10737 } else {
10738 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10739 }
10740 }
10741 }
10742 LOGD("Final cacMode is %d", cacMode);
10743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10744 rc = BAD_VALUE;
10745 }
10746 } else {
10747 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10748 }
10749 }
10750
10751 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10752 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10753 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10754 fwk_focusMode);
10755 if (NAME_NOT_FOUND != val) {
10756 uint8_t focusMode = (uint8_t)val;
10757 LOGD("set focus mode %d", focusMode);
10758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10759 rc = BAD_VALUE;
10760 }
10761 }
10762 }
10763
10764 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10765 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10767 focalDistance)) {
10768 rc = BAD_VALUE;
10769 }
10770 }
10771
10772 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10773 uint8_t fwk_antibandingMode =
10774 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10775 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10776 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10777 if (NAME_NOT_FOUND != val) {
10778 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010779 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10780 if (m60HzZone) {
10781 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10782 } else {
10783 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10784 }
10785 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10787 hal_antibandingMode)) {
10788 rc = BAD_VALUE;
10789 }
10790 }
10791 }
10792
10793 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10794 int32_t expCompensation = frame_settings.find(
10795 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10796 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10797 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10798 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10799 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010800 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010801 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10802 expCompensation)) {
10803 rc = BAD_VALUE;
10804 }
10805 }
10806
10807 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10808 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10810 rc = BAD_VALUE;
10811 }
10812 }
10813 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10814 rc = setHalFpsRange(frame_settings, hal_metadata);
10815 if (rc != NO_ERROR) {
10816 LOGE("setHalFpsRange failed");
10817 }
10818 }
10819
10820 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10821 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10823 rc = BAD_VALUE;
10824 }
10825 }
10826
10827 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10828 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10829 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10830 fwk_effectMode);
10831 if (NAME_NOT_FOUND != val) {
10832 uint8_t effectMode = (uint8_t)val;
10833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10834 rc = BAD_VALUE;
10835 }
10836 }
10837 }
10838
10839 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10840 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10842 colorCorrectMode)) {
10843 rc = BAD_VALUE;
10844 }
10845 }
10846
10847 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10848 cam_color_correct_gains_t colorCorrectGains;
10849 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10850 colorCorrectGains.gains[i] =
10851 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10852 }
10853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10854 colorCorrectGains)) {
10855 rc = BAD_VALUE;
10856 }
10857 }
10858
10859 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10860 cam_color_correct_matrix_t colorCorrectTransform;
10861 cam_rational_type_t transform_elem;
10862 size_t num = 0;
10863 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10864 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10865 transform_elem.numerator =
10866 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10867 transform_elem.denominator =
10868 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10869 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10870 num++;
10871 }
10872 }
10873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10874 colorCorrectTransform)) {
10875 rc = BAD_VALUE;
10876 }
10877 }
10878
10879 cam_trigger_t aecTrigger;
10880 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10881 aecTrigger.trigger_id = -1;
10882 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10883 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10884 aecTrigger.trigger =
10885 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10886 aecTrigger.trigger_id =
10887 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10889 aecTrigger)) {
10890 rc = BAD_VALUE;
10891 }
10892 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10893 aecTrigger.trigger, aecTrigger.trigger_id);
10894 }
10895
10896 /*af_trigger must come with a trigger id*/
10897 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10898 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10899 cam_trigger_t af_trigger;
10900 af_trigger.trigger =
10901 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10902 af_trigger.trigger_id =
10903 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10905 rc = BAD_VALUE;
10906 }
10907 LOGD("AfTrigger: %d AfTriggerID: %d",
10908 af_trigger.trigger, af_trigger.trigger_id);
10909 }
10910
10911 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10912 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10914 rc = BAD_VALUE;
10915 }
10916 }
10917 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10918 cam_edge_application_t edge_application;
10919 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10920 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10921 edge_application.sharpness = 0;
10922 } else {
10923 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10924 }
10925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10926 rc = BAD_VALUE;
10927 }
10928 }
10929
10930 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10931 int32_t respectFlashMode = 1;
10932 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10933 uint8_t fwk_aeMode =
10934 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10935 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10936 respectFlashMode = 0;
10937 LOGH("AE Mode controls flash, ignore android.flash.mode");
10938 }
10939 }
10940 if (respectFlashMode) {
10941 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10942 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10943 LOGH("flash mode after mapping %d", val);
10944 // To check: CAM_INTF_META_FLASH_MODE usage
10945 if (NAME_NOT_FOUND != val) {
10946 uint8_t flashMode = (uint8_t)val;
10947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10948 rc = BAD_VALUE;
10949 }
10950 }
10951 }
10952 }
10953
10954 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10955 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10956 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10957 rc = BAD_VALUE;
10958 }
10959 }
10960
10961 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10962 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10964 flashFiringTime)) {
10965 rc = BAD_VALUE;
10966 }
10967 }
10968
10969 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10970 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10972 hotPixelMode)) {
10973 rc = BAD_VALUE;
10974 }
10975 }
10976
10977 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10978 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10980 lensAperture)) {
10981 rc = BAD_VALUE;
10982 }
10983 }
10984
10985 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10986 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10988 filterDensity)) {
10989 rc = BAD_VALUE;
10990 }
10991 }
10992
10993 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10994 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10995 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10996 focalLength)) {
10997 rc = BAD_VALUE;
10998 }
10999 }
11000
11001 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11002 uint8_t optStabMode =
11003 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11005 optStabMode)) {
11006 rc = BAD_VALUE;
11007 }
11008 }
11009
11010 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11011 uint8_t videoStabMode =
11012 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11013 LOGD("videoStabMode from APP = %d", videoStabMode);
11014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11015 videoStabMode)) {
11016 rc = BAD_VALUE;
11017 }
11018 }
11019
11020
11021 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11022 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11024 noiseRedMode)) {
11025 rc = BAD_VALUE;
11026 }
11027 }
11028
11029 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11030 float reprocessEffectiveExposureFactor =
11031 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11033 reprocessEffectiveExposureFactor)) {
11034 rc = BAD_VALUE;
11035 }
11036 }
11037
11038 cam_crop_region_t scalerCropRegion;
11039 bool scalerCropSet = false;
11040 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11041 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11042 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11043 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11044 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11045
11046 // Map coordinate system from active array to sensor output.
11047 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11048 scalerCropRegion.width, scalerCropRegion.height);
11049
11050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11051 scalerCropRegion)) {
11052 rc = BAD_VALUE;
11053 }
11054 scalerCropSet = true;
11055 }
11056
11057 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11058 int64_t sensorExpTime =
11059 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11060 LOGD("setting sensorExpTime %lld", sensorExpTime);
11061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11062 sensorExpTime)) {
11063 rc = BAD_VALUE;
11064 }
11065 }
11066
11067 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11068 int64_t sensorFrameDuration =
11069 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
11070 int64_t minFrameDuration = getMinFrameDuration(request);
11071 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11072 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11073 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11074 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11076 sensorFrameDuration)) {
11077 rc = BAD_VALUE;
11078 }
11079 }
11080
11081 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11082 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11083 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11084 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11085 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11086 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11087 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11089 sensorSensitivity)) {
11090 rc = BAD_VALUE;
11091 }
11092 }
11093
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011094#ifndef USE_HAL_3_3
11095 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11096 int32_t ispSensitivity =
11097 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11098 if (ispSensitivity <
11099 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11100 ispSensitivity =
11101 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11102 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11103 }
11104 if (ispSensitivity >
11105 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11106 ispSensitivity =
11107 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11108 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11109 }
11110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11111 ispSensitivity)) {
11112 rc = BAD_VALUE;
11113 }
11114 }
11115#endif
11116
Thierry Strudel3d639192016-09-09 11:52:26 -070011117 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11118 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11120 rc = BAD_VALUE;
11121 }
11122 }
11123
11124 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11125 uint8_t fwk_facedetectMode =
11126 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11127
11128 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11129 fwk_facedetectMode);
11130
11131 if (NAME_NOT_FOUND != val) {
11132 uint8_t facedetectMode = (uint8_t)val;
11133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11134 facedetectMode)) {
11135 rc = BAD_VALUE;
11136 }
11137 }
11138 }
11139
11140 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
11141 uint8_t histogramMode =
11142 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
11143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11144 histogramMode)) {
11145 rc = BAD_VALUE;
11146 }
11147 }
11148
11149 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11150 uint8_t sharpnessMapMode =
11151 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11153 sharpnessMapMode)) {
11154 rc = BAD_VALUE;
11155 }
11156 }
11157
11158 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11159 uint8_t tonemapMode =
11160 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11162 rc = BAD_VALUE;
11163 }
11164 }
11165 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11166 /*All tonemap channels will have the same number of points*/
11167 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11168 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11169 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11170 cam_rgb_tonemap_curves tonemapCurves;
11171 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11172 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11173 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11174 tonemapCurves.tonemap_points_cnt,
11175 CAM_MAX_TONEMAP_CURVE_SIZE);
11176 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11177 }
11178
11179 /* ch0 = G*/
11180 size_t point = 0;
11181 cam_tonemap_curve_t tonemapCurveGreen;
11182 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11183 for (size_t j = 0; j < 2; j++) {
11184 tonemapCurveGreen.tonemap_points[i][j] =
11185 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11186 point++;
11187 }
11188 }
11189 tonemapCurves.curves[0] = tonemapCurveGreen;
11190
11191 /* ch 1 = B */
11192 point = 0;
11193 cam_tonemap_curve_t tonemapCurveBlue;
11194 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11195 for (size_t j = 0; j < 2; j++) {
11196 tonemapCurveBlue.tonemap_points[i][j] =
11197 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11198 point++;
11199 }
11200 }
11201 tonemapCurves.curves[1] = tonemapCurveBlue;
11202
11203 /* ch 2 = R */
11204 point = 0;
11205 cam_tonemap_curve_t tonemapCurveRed;
11206 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11207 for (size_t j = 0; j < 2; j++) {
11208 tonemapCurveRed.tonemap_points[i][j] =
11209 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11210 point++;
11211 }
11212 }
11213 tonemapCurves.curves[2] = tonemapCurveRed;
11214
11215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11216 tonemapCurves)) {
11217 rc = BAD_VALUE;
11218 }
11219 }
11220
11221 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11222 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11224 captureIntent)) {
11225 rc = BAD_VALUE;
11226 }
11227 }
11228
11229 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11230 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11232 blackLevelLock)) {
11233 rc = BAD_VALUE;
11234 }
11235 }
11236
11237 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11238 uint8_t lensShadingMapMode =
11239 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11241 lensShadingMapMode)) {
11242 rc = BAD_VALUE;
11243 }
11244 }
11245
11246 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11247 cam_area_t roi;
11248 bool reset = true;
11249 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
11250
11251 // Map coordinate system from active array to sensor output.
11252 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11253 roi.rect.height);
11254
11255 if (scalerCropSet) {
11256 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11257 }
11258 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11259 rc = BAD_VALUE;
11260 }
11261 }
11262
11263 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11264 cam_area_t roi;
11265 bool reset = true;
11266 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
11267
11268 // Map coordinate system from active array to sensor output.
11269 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11270 roi.rect.height);
11271
11272 if (scalerCropSet) {
11273 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11274 }
11275 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11276 rc = BAD_VALUE;
11277 }
11278 }
11279
11280 // CDS for non-HFR non-video mode
11281 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11282 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11283 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11284 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11285 LOGE("Invalid CDS mode %d!", *fwk_cds);
11286 } else {
11287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11288 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11289 rc = BAD_VALUE;
11290 }
11291 }
11292 }
11293
Thierry Strudel04e026f2016-10-10 11:27:36 -070011294 // Video HDR
11295 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
11296 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
11297 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11298 rc = setVideoHdrMode(mParameters, vhdr);
11299 if (rc != NO_ERROR) {
11300 LOGE("setVideoHDR is failed");
11301 }
11302 }
11303
11304 //IR
11305 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11306 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11307 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
11308 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11309 LOGE("Invalid IR mode %d!", fwk_ir);
11310 } else {
11311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11312 CAM_INTF_META_IR_MODE, fwk_ir)) {
11313 rc = BAD_VALUE;
11314 }
11315 }
11316 }
11317
Thierry Strudel269c81a2016-10-12 12:13:59 -070011318 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11319 float aec_speed;
11320 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11321 LOGD("AEC Speed :%f", aec_speed);
11322 if ( aec_speed < 0 ) {
11323 LOGE("Invalid AEC mode %f!", aec_speed);
11324 } else {
11325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11326 aec_speed)) {
11327 rc = BAD_VALUE;
11328 }
11329 }
11330 }
11331
11332 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11333 float awb_speed;
11334 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11335 LOGD("AWB Speed :%f", awb_speed);
11336 if ( awb_speed < 0 ) {
11337 LOGE("Invalid AWB mode %f!", awb_speed);
11338 } else {
11339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11340 awb_speed)) {
11341 rc = BAD_VALUE;
11342 }
11343 }
11344 }
11345
Thierry Strudel3d639192016-09-09 11:52:26 -070011346 // TNR
11347 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11348 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11349 uint8_t b_TnrRequested = 0;
11350 cam_denoise_param_t tnr;
11351 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11352 tnr.process_plates =
11353 (cam_denoise_process_type_t)frame_settings.find(
11354 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11355 b_TnrRequested = tnr.denoise_enable;
11356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11357 rc = BAD_VALUE;
11358 }
11359 }
11360
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011361 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
11362 int32_t* exposure_metering_mode =
11363 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
11364 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11365 *exposure_metering_mode)) {
11366 rc = BAD_VALUE;
11367 }
11368 }
11369
Thierry Strudel3d639192016-09-09 11:52:26 -070011370 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11371 int32_t fwk_testPatternMode =
11372 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11373 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11374 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11375
11376 if (NAME_NOT_FOUND != testPatternMode) {
11377 cam_test_pattern_data_t testPatternData;
11378 memset(&testPatternData, 0, sizeof(testPatternData));
11379 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11380 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11381 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11382 int32_t *fwk_testPatternData =
11383 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11384 testPatternData.r = fwk_testPatternData[0];
11385 testPatternData.b = fwk_testPatternData[3];
11386 switch (gCamCapability[mCameraId]->color_arrangement) {
11387 case CAM_FILTER_ARRANGEMENT_RGGB:
11388 case CAM_FILTER_ARRANGEMENT_GRBG:
11389 testPatternData.gr = fwk_testPatternData[1];
11390 testPatternData.gb = fwk_testPatternData[2];
11391 break;
11392 case CAM_FILTER_ARRANGEMENT_GBRG:
11393 case CAM_FILTER_ARRANGEMENT_BGGR:
11394 testPatternData.gr = fwk_testPatternData[2];
11395 testPatternData.gb = fwk_testPatternData[1];
11396 break;
11397 default:
11398 LOGE("color arrangement %d is not supported",
11399 gCamCapability[mCameraId]->color_arrangement);
11400 break;
11401 }
11402 }
11403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11404 testPatternData)) {
11405 rc = BAD_VALUE;
11406 }
11407 } else {
11408 LOGE("Invalid framework sensor test pattern mode %d",
11409 fwk_testPatternMode);
11410 }
11411 }
11412
11413 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11414 size_t count = 0;
11415 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11416 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11417 gps_coords.data.d, gps_coords.count, count);
11418 if (gps_coords.count != count) {
11419 rc = BAD_VALUE;
11420 }
11421 }
11422
11423 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11424 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11425 size_t count = 0;
11426 const char *gps_methods_src = (const char *)
11427 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11428 memset(gps_methods, '\0', sizeof(gps_methods));
11429 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11430 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11431 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11432 if (GPS_PROCESSING_METHOD_SIZE != count) {
11433 rc = BAD_VALUE;
11434 }
11435 }
11436
11437 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11438 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11440 gps_timestamp)) {
11441 rc = BAD_VALUE;
11442 }
11443 }
11444
11445 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11446 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11447 cam_rotation_info_t rotation_info;
11448 if (orientation == 0) {
11449 rotation_info.rotation = ROTATE_0;
11450 } else if (orientation == 90) {
11451 rotation_info.rotation = ROTATE_90;
11452 } else if (orientation == 180) {
11453 rotation_info.rotation = ROTATE_180;
11454 } else if (orientation == 270) {
11455 rotation_info.rotation = ROTATE_270;
11456 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011457 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011458 rotation_info.streamId = snapshotStreamId;
11459 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11461 rc = BAD_VALUE;
11462 }
11463 }
11464
11465 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11466 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11468 rc = BAD_VALUE;
11469 }
11470 }
11471
11472 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11473 uint32_t thumb_quality = (uint32_t)
11474 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11476 thumb_quality)) {
11477 rc = BAD_VALUE;
11478 }
11479 }
11480
11481 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11482 cam_dimension_t dim;
11483 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11484 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11486 rc = BAD_VALUE;
11487 }
11488 }
11489
11490 // Internal metadata
11491 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11492 size_t count = 0;
11493 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11494 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11495 privatedata.data.i32, privatedata.count, count);
11496 if (privatedata.count != count) {
11497 rc = BAD_VALUE;
11498 }
11499 }
11500
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011501 // ISO/Exposure Priority
11502 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11503 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11504 cam_priority_mode_t mode =
11505 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11506 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11507 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11508 use_iso_exp_pty.previewOnly = FALSE;
11509 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11510 use_iso_exp_pty.value = *ptr;
11511
11512 if(CAM_ISO_PRIORITY == mode) {
11513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11514 use_iso_exp_pty)) {
11515 rc = BAD_VALUE;
11516 }
11517 }
11518 else {
11519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11520 use_iso_exp_pty)) {
11521 rc = BAD_VALUE;
11522 }
11523 }
11524 }
11525 }
11526
11527 // Saturation
11528 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11529 int32_t* use_saturation =
11530 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11532 rc = BAD_VALUE;
11533 }
11534 }
11535
Thierry Strudel3d639192016-09-09 11:52:26 -070011536 // EV step
11537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11538 gCamCapability[mCameraId]->exp_compensation_step)) {
11539 rc = BAD_VALUE;
11540 }
11541
11542 // CDS info
11543 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11544 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11545 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11546
11547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11548 CAM_INTF_META_CDS_DATA, *cdsData)) {
11549 rc = BAD_VALUE;
11550 }
11551 }
11552
Shuzhen Wang19463d72016-03-08 11:09:52 -080011553 // Hybrid AE
11554 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11555 uint8_t *hybrid_ae = (uint8_t *)
11556 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11557
11558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11559 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11560 rc = BAD_VALUE;
11561 }
11562 }
11563
Thierry Strudel3d639192016-09-09 11:52:26 -070011564 return rc;
11565}
11566
11567/*===========================================================================
11568 * FUNCTION : captureResultCb
11569 *
11570 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11571 *
11572 * PARAMETERS :
11573 * @frame : frame information from mm-camera-interface
11574 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11575 * @userdata: userdata
11576 *
11577 * RETURN : NONE
11578 *==========================================================================*/
11579void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11580 camera3_stream_buffer_t *buffer,
11581 uint32_t frame_number, bool isInputBuffer, void *userdata)
11582{
11583 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11584 if (hw == NULL) {
11585 LOGE("Invalid hw %p", hw);
11586 return;
11587 }
11588
11589 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11590 return;
11591}
11592
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011593/*===========================================================================
11594 * FUNCTION : setBufferErrorStatus
11595 *
11596 * DESCRIPTION: Callback handler for channels to report any buffer errors
11597 *
11598 * PARAMETERS :
11599 * @ch : Channel on which buffer error is reported from
11600 * @frame_number : frame number on which buffer error is reported on
11601 * @buffer_status : buffer error status
11602 * @userdata: userdata
11603 *
11604 * RETURN : NONE
11605 *==========================================================================*/
11606void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11607 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11608{
11609 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11610 if (hw == NULL) {
11611 LOGE("Invalid hw %p", hw);
11612 return;
11613 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011614
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011615 hw->setBufferErrorStatus(ch, frame_number, err);
11616 return;
11617}
11618
11619void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11620 uint32_t frameNumber, camera3_buffer_status_t err)
11621{
11622 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11623 pthread_mutex_lock(&mMutex);
11624
11625 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11626 if (req.frame_number != frameNumber)
11627 continue;
11628 for (auto& k : req.mPendingBufferList) {
11629 if(k.stream->priv == ch) {
11630 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11631 }
11632 }
11633 }
11634
11635 pthread_mutex_unlock(&mMutex);
11636 return;
11637}
Thierry Strudel3d639192016-09-09 11:52:26 -070011638/*===========================================================================
11639 * FUNCTION : initialize
11640 *
11641 * DESCRIPTION: Pass framework callback pointers to HAL
11642 *
11643 * PARAMETERS :
11644 *
11645 *
11646 * RETURN : Success : 0
11647 * Failure: -ENODEV
11648 *==========================================================================*/
11649
11650int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11651 const camera3_callback_ops_t *callback_ops)
11652{
11653 LOGD("E");
11654 QCamera3HardwareInterface *hw =
11655 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11656 if (!hw) {
11657 LOGE("NULL camera device");
11658 return -ENODEV;
11659 }
11660
11661 int rc = hw->initialize(callback_ops);
11662 LOGD("X");
11663 return rc;
11664}
11665
11666/*===========================================================================
11667 * FUNCTION : configure_streams
11668 *
11669 * DESCRIPTION:
11670 *
11671 * PARAMETERS :
11672 *
11673 *
11674 * RETURN : Success: 0
11675 * Failure: -EINVAL (if stream configuration is invalid)
11676 * -ENODEV (fatal error)
11677 *==========================================================================*/
11678
11679int QCamera3HardwareInterface::configure_streams(
11680 const struct camera3_device *device,
11681 camera3_stream_configuration_t *stream_list)
11682{
11683 LOGD("E");
11684 QCamera3HardwareInterface *hw =
11685 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11686 if (!hw) {
11687 LOGE("NULL camera device");
11688 return -ENODEV;
11689 }
11690 int rc = hw->configureStreams(stream_list);
11691 LOGD("X");
11692 return rc;
11693}
11694
11695/*===========================================================================
11696 * FUNCTION : construct_default_request_settings
11697 *
11698 * DESCRIPTION: Configure a settings buffer to meet the required use case
11699 *
11700 * PARAMETERS :
11701 *
11702 *
11703 * RETURN : Success: Return valid metadata
11704 * Failure: Return NULL
11705 *==========================================================================*/
11706const camera_metadata_t* QCamera3HardwareInterface::
11707 construct_default_request_settings(const struct camera3_device *device,
11708 int type)
11709{
11710
11711 LOGD("E");
11712 camera_metadata_t* fwk_metadata = NULL;
11713 QCamera3HardwareInterface *hw =
11714 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11715 if (!hw) {
11716 LOGE("NULL camera device");
11717 return NULL;
11718 }
11719
11720 fwk_metadata = hw->translateCapabilityToMetadata(type);
11721
11722 LOGD("X");
11723 return fwk_metadata;
11724}
11725
11726/*===========================================================================
11727 * FUNCTION : process_capture_request
11728 *
11729 * DESCRIPTION:
11730 *
11731 * PARAMETERS :
11732 *
11733 *
11734 * RETURN :
11735 *==========================================================================*/
11736int QCamera3HardwareInterface::process_capture_request(
11737 const struct camera3_device *device,
11738 camera3_capture_request_t *request)
11739{
11740 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011741 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011742 QCamera3HardwareInterface *hw =
11743 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11744 if (!hw) {
11745 LOGE("NULL camera device");
11746 return -EINVAL;
11747 }
11748
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011749 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011750 LOGD("X");
11751 return rc;
11752}
11753
11754/*===========================================================================
11755 * FUNCTION : dump
11756 *
11757 * DESCRIPTION:
11758 *
11759 * PARAMETERS :
11760 *
11761 *
11762 * RETURN :
11763 *==========================================================================*/
11764
11765void QCamera3HardwareInterface::dump(
11766 const struct camera3_device *device, int fd)
11767{
11768 /* Log level property is read when "adb shell dumpsys media.camera" is
11769 called so that the log level can be controlled without restarting
11770 the media server */
11771 getLogLevel();
11772
11773 LOGD("E");
11774 QCamera3HardwareInterface *hw =
11775 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11776 if (!hw) {
11777 LOGE("NULL camera device");
11778 return;
11779 }
11780
11781 hw->dump(fd);
11782 LOGD("X");
11783 return;
11784}
11785
11786/*===========================================================================
11787 * FUNCTION : flush
11788 *
11789 * DESCRIPTION:
11790 *
11791 * PARAMETERS :
11792 *
11793 *
11794 * RETURN :
11795 *==========================================================================*/
11796
11797int QCamera3HardwareInterface::flush(
11798 const struct camera3_device *device)
11799{
11800 int rc;
11801 LOGD("E");
11802 QCamera3HardwareInterface *hw =
11803 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11804 if (!hw) {
11805 LOGE("NULL camera device");
11806 return -EINVAL;
11807 }
11808
11809 pthread_mutex_lock(&hw->mMutex);
11810 // Validate current state
11811 switch (hw->mState) {
11812 case STARTED:
11813 /* valid state */
11814 break;
11815
11816 case ERROR:
11817 pthread_mutex_unlock(&hw->mMutex);
11818 hw->handleCameraDeviceError();
11819 return -ENODEV;
11820
11821 default:
11822 LOGI("Flush returned during state %d", hw->mState);
11823 pthread_mutex_unlock(&hw->mMutex);
11824 return 0;
11825 }
11826 pthread_mutex_unlock(&hw->mMutex);
11827
11828 rc = hw->flush(true /* restart channels */ );
11829 LOGD("X");
11830 return rc;
11831}
11832
11833/*===========================================================================
11834 * FUNCTION : close_camera_device
11835 *
11836 * DESCRIPTION:
11837 *
11838 * PARAMETERS :
11839 *
11840 *
11841 * RETURN :
11842 *==========================================================================*/
11843int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11844{
11845 int ret = NO_ERROR;
11846 QCamera3HardwareInterface *hw =
11847 reinterpret_cast<QCamera3HardwareInterface *>(
11848 reinterpret_cast<camera3_device_t *>(device)->priv);
11849 if (!hw) {
11850 LOGE("NULL camera device");
11851 return BAD_VALUE;
11852 }
11853
11854 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11855 delete hw;
11856 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011857 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011858 return ret;
11859}
11860
11861/*===========================================================================
11862 * FUNCTION : getWaveletDenoiseProcessPlate
11863 *
11864 * DESCRIPTION: query wavelet denoise process plate
11865 *
11866 * PARAMETERS : None
11867 *
11868 * RETURN : WNR prcocess plate value
11869 *==========================================================================*/
11870cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11871{
11872 char prop[PROPERTY_VALUE_MAX];
11873 memset(prop, 0, sizeof(prop));
11874 property_get("persist.denoise.process.plates", prop, "0");
11875 int processPlate = atoi(prop);
11876 switch(processPlate) {
11877 case 0:
11878 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11879 case 1:
11880 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11881 case 2:
11882 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11883 case 3:
11884 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11885 default:
11886 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11887 }
11888}
11889
11890
11891/*===========================================================================
11892 * FUNCTION : getTemporalDenoiseProcessPlate
11893 *
11894 * DESCRIPTION: query temporal denoise process plate
11895 *
11896 * PARAMETERS : None
11897 *
11898 * RETURN : TNR prcocess plate value
11899 *==========================================================================*/
11900cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11901{
11902 char prop[PROPERTY_VALUE_MAX];
11903 memset(prop, 0, sizeof(prop));
11904 property_get("persist.tnr.process.plates", prop, "0");
11905 int processPlate = atoi(prop);
11906 switch(processPlate) {
11907 case 0:
11908 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11909 case 1:
11910 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11911 case 2:
11912 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11913 case 3:
11914 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11915 default:
11916 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11917 }
11918}
11919
11920
/*===========================================================================
 * FUNCTION   : extractSceneMode
 *
 * DESCRIPTION: Extract scene mode from frameworks set metadata, translate it
 *              to the HAL bestshot mode and program the related sensor/HAL
 *              HDR bracketing parameters.
 *
 * PARAMETERS :
 *   @frame_settings: CameraMetadata reference with the framework request
 *   @metaMode: ANDROID_CONTROL_MODE value from the request
 *   @hal_metadata: hal metadata structure receiving the translated entries
 *
 * RETURN     : NO_ERROR on success, BAD_VALUE if a batch entry cannot be set
 *==========================================================================*/
int32_t QCamera3HardwareInterface::extractSceneMode(
        const CameraMetadata &frame_settings, uint8_t metaMode,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    // HAL-side scene mode; stays OFF unless the request selects a scene mode
    // that maps to a HAL value.
    uint8_t sceneMode = CAM_SCENE_MODE_OFF;

    if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
        LOGD("Ignoring control mode OFF_KEEP_STATE");
        return NO_ERROR;
    }

    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        camera_metadata_ro_entry entry =
                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
        // No scene mode in the request: nothing to translate.
        if (0 == entry.count)
            return rc;

        uint8_t fwk_sceneMode = entry.data.u8[0];

        // Map the framework scene mode to the HAL (CAM_SCENE_MODE_*) value.
        int val = lookupHalName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                fwk_sceneMode);
        if (NAME_NOT_FOUND != val) {
            sceneMode = (uint8_t)val;
            LOGD("sceneMode: %d", sceneMode);
        }
    }

    // Toggle sensor HDR: enable when HDR scene mode is requested, and also
    // call in when it was previously enabled so it gets turned off.
    if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
        rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
    }

    // When sensor HDR is not active, fall back to multi-frame (bracketing)
    // HDR and program the bestshot mode.
    if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
        // NOTE(review): sceneMode holds a HAL CAM_SCENE_MODE_* value here,
        // but it is compared against the framework enum
        // ANDROID_CONTROL_SCENE_MODE_HDR. This only behaves as intended if
        // the two enum values coincide — verify against cam_types.h.
        if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
            cam_hdr_param_t hdr_params;
            hdr_params.hdr_enable = 1;
            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
            hdr_params.hdr_need_1x = false;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
                rc = BAD_VALUE;
            }
        }

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
            rc = BAD_VALUE;
        }
    }

    // Debug override: force multi-frame HDR bracketing on every snapshot.
    if (mForceHdrSnapshot) {
        cam_hdr_param_t hdr_params;
        hdr_params.hdr_enable = 1;
        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
        hdr_params.hdr_need_1x = false;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
            rc = BAD_VALUE;
        }
    }

    return rc;
}
11997
11998/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011999 * FUNCTION : setVideoHdrMode
12000 *
12001 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12002 *
12003 * PARAMETERS :
12004 * @hal_metadata: hal metadata structure
12005 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
12006 *
12007 * RETURN : None
12008 *==========================================================================*/
12009int32_t QCamera3HardwareInterface::setVideoHdrMode(
12010 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12011{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012012 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12013 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12014 }
12015
12016 LOGE("Invalid Video HDR mode %d!", vhdr);
12017 return BAD_VALUE;
12018}
12019
/*===========================================================================
 * FUNCTION   : setSensorHDR
 *
 * DESCRIPTION: Enable/disable sensor HDR. The concrete HDR type is taken
 *              from the persist.camera.sensor.hdr property and validated
 *              against the camera's supported feature mask before being
 *              programmed.
 *
 * PARAMETERS :
 *   @hal_metadata: hal metadata structure
 *   @enable: whether to enable (true) or disable (false) sensor HDR
 *   @isVideoHdrEnable: true when called for video HDR; in that case
 *                      m_bSensorHDREnabled is left untouched
 *
 * RETURN     : NO_ERROR on success, BAD_VALUE on unsupported mode or batch
 *              failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setSensorHDR(
        metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
{
    int32_t rc = NO_ERROR;
    // Disabled by default; only read the property when enabling.
    cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;

    if (enable) {
        char sensor_hdr_prop[PROPERTY_VALUE_MAX];
        memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
        #ifdef _LE_CAMERA_
        //Default to staggered HDR for IOT
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
        #else
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
        #endif
        sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
    }

    // Check the requested HDR type against hardware capability; only a
    // supported type is actually programmed below.
    bool isSupported = false;
    switch (sensor_hdr) {
        case CAM_SENSOR_HDR_IN_SENSOR:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode In Sensor");
            }
            break;
        case CAM_SENSOR_HDR_ZIGZAG:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Zigzag");
            }
            break;
        case CAM_SENSOR_HDR_STAGGERED:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Staggered");
            }
            break;
        case CAM_SENSOR_HDR_OFF:
            // Turning HDR off needs no capability check.
            isSupported = true;
            LOGD("Turning off sensor HDR");
            break;
        default:
            LOGE("HDR mode %d not supported", sensor_hdr);
            rc = BAD_VALUE;
            break;
    }

    if(isSupported) {
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
            rc = BAD_VALUE;
        } else {
            // Track the scene-mode-driven sensor HDR state only; video HDR
            // callers manage their own state.
            if(!isVideoHdrEnable)
                m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
        }
    }
    return rc;
}
12093
12094/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012095 * FUNCTION : needRotationReprocess
12096 *
12097 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12098 *
12099 * PARAMETERS : none
12100 *
12101 * RETURN : true: needed
12102 * false: no need
12103 *==========================================================================*/
12104bool QCamera3HardwareInterface::needRotationReprocess()
12105{
12106 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12107 // current rotation is not zero, and pp has the capability to process rotation
12108 LOGH("need do reprocess for rotation");
12109 return true;
12110 }
12111
12112 return false;
12113}
12114
12115/*===========================================================================
12116 * FUNCTION : needReprocess
12117 *
12118 * DESCRIPTION: if reprocess in needed
12119 *
12120 * PARAMETERS : none
12121 *
12122 * RETURN : true: needed
12123 * false: no need
12124 *==========================================================================*/
12125bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12126{
12127 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12128 // TODO: add for ZSL HDR later
12129 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12130 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12131 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12132 return true;
12133 } else {
12134 LOGH("already post processed frame");
12135 return false;
12136 }
12137 }
12138 return needRotationReprocess();
12139}
12140
12141/*===========================================================================
12142 * FUNCTION : needJpegExifRotation
12143 *
12144 * DESCRIPTION: if rotation from jpeg is needed
12145 *
12146 * PARAMETERS : none
12147 *
12148 * RETURN : true: needed
12149 * false: no need
12150 *==========================================================================*/
12151bool QCamera3HardwareInterface::needJpegExifRotation()
12152{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012153 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012154 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12155 LOGD("Need use Jpeg EXIF Rotation");
12156 return true;
12157 }
12158 return false;
12159}
12160
/*===========================================================================
 * FUNCTION   : addOfflineReprocChannel
 *
 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
 *              coming from input channel
 *
 * PARAMETERS :
 *   @config : reprocess configuration
 *   @inputChHandle : pointer to the input (source) channel
 *
 *
 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): plain operator new throws rather than returning NULL, so
    // this check is only effective with a nothrow allocator — confirm build
    // flags before relying on it.
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the PP mask when the hardware cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Carry the HDR bracketing parameters of the source config into the
    // reprocess pass when HDR is requested.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Debug override: force multi-frame HDR on every snapshot.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
12233
/*===========================================================================
 * FUNCTION   : getMobicatMask
 *
 * DESCRIPTION: returns the mobicat mask most recently computed by
 *              setMobicat()
 *
 * PARAMETERS : none
 *
 * RETURN     : mobicat mask (non-zero when mobicat is enabled)
 *
 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    return m_MobicatMask;
}
12248
12249/*===========================================================================
12250 * FUNCTION : setMobicat
12251 *
12252 * DESCRIPTION: set Mobicat on/off.
12253 *
12254 * PARAMETERS :
12255 * @params : none
12256 *
12257 * RETURN : int32_t type of status
12258 * NO_ERROR -- success
12259 * none-zero failure code
12260 *==========================================================================*/
12261int32_t QCamera3HardwareInterface::setMobicat()
12262{
12263 char value [PROPERTY_VALUE_MAX];
12264 property_get("persist.camera.mobicat", value, "0");
12265 int32_t ret = NO_ERROR;
12266 uint8_t enableMobi = (uint8_t)atoi(value);
12267
12268 if (enableMobi) {
12269 tune_cmd_t tune_cmd;
12270 tune_cmd.type = SET_RELOAD_CHROMATIX;
12271 tune_cmd.module = MODULE_ALL;
12272 tune_cmd.value = TRUE;
12273 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12274 CAM_INTF_PARM_SET_VFE_COMMAND,
12275 tune_cmd);
12276
12277 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12278 CAM_INTF_PARM_SET_PP_COMMAND,
12279 tune_cmd);
12280 }
12281 m_MobicatMask = enableMobi;
12282
12283 return ret;
12284}
12285
12286/*===========================================================================
12287* FUNCTION : getLogLevel
12288*
12289* DESCRIPTION: Reads the log level property into a variable
12290*
12291* PARAMETERS :
12292* None
12293*
12294* RETURN :
12295* None
12296*==========================================================================*/
12297void QCamera3HardwareInterface::getLogLevel()
12298{
12299 char prop[PROPERTY_VALUE_MAX];
12300 uint32_t globalLogLevel = 0;
12301
12302 property_get("persist.camera.hal.debug", prop, "0");
12303 int val = atoi(prop);
12304 if (0 <= val) {
12305 gCamHal3LogLevel = (uint32_t)val;
12306 }
12307
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012308 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012309 gKpiDebugLevel = atoi(prop);
12310
12311 property_get("persist.camera.global.debug", prop, "0");
12312 val = atoi(prop);
12313 if (0 <= val) {
12314 globalLogLevel = (uint32_t)val;
12315 }
12316
12317 /* Highest log level among hal.logs and global.logs is selected */
12318 if (gCamHal3LogLevel < globalLogLevel)
12319 gCamHal3LogLevel = globalLogLevel;
12320
12321 return;
12322}
12323
12324/*===========================================================================
12325 * FUNCTION : validateStreamRotations
12326 *
12327 * DESCRIPTION: Check if the rotations requested are supported
12328 *
12329 * PARAMETERS :
12330 * @stream_list : streams to be configured
12331 *
12332 * RETURN : NO_ERROR on success
12333 * -EINVAL on failure
12334 *
12335 *==========================================================================*/
12336int QCamera3HardwareInterface::validateStreamRotations(
12337 camera3_stream_configuration_t *streamList)
12338{
12339 int rc = NO_ERROR;
12340
12341 /*
12342 * Loop through all streams requested in configuration
12343 * Check if unsupported rotations have been requested on any of them
12344 */
12345 for (size_t j = 0; j < streamList->num_streams; j++){
12346 camera3_stream_t *newStream = streamList->streams[j];
12347
12348 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12349 bool isImplDef = (newStream->format ==
12350 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12351 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12352 isImplDef);
12353
12354 if (isRotated && (!isImplDef || isZsl)) {
12355 LOGE("Error: Unsupported rotation of %d requested for stream"
12356 "type:%d and stream format:%d",
12357 newStream->rotation, newStream->stream_type,
12358 newStream->format);
12359 rc = -EINVAL;
12360 break;
12361 }
12362 }
12363
12364 return rc;
12365}
12366
12367/*===========================================================================
12368* FUNCTION : getFlashInfo
12369*
12370* DESCRIPTION: Retrieve information about whether the device has a flash.
12371*
12372* PARAMETERS :
12373* @cameraId : Camera id to query
12374* @hasFlash : Boolean indicating whether there is a flash device
12375* associated with given camera
12376* @flashNode : If a flash device exists, this will be its device node.
12377*
12378* RETURN :
12379* None
12380*==========================================================================*/
12381void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12382 bool& hasFlash,
12383 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12384{
12385 cam_capability_t* camCapability = gCamCapability[cameraId];
12386 if (NULL == camCapability) {
12387 hasFlash = false;
12388 flashNode[0] = '\0';
12389 } else {
12390 hasFlash = camCapability->flash_available;
12391 strlcpy(flashNode,
12392 (char*)camCapability->flash_dev_name,
12393 QCAMERA_MAX_FILEPATH_LENGTH);
12394 }
12395}
12396
12397/*===========================================================================
12398* FUNCTION : getEepromVersionInfo
12399*
12400* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12401*
12402* PARAMETERS : None
12403*
12404* RETURN : string describing EEPROM version
12405* "\0" if no such info available
12406*==========================================================================*/
12407const char *QCamera3HardwareInterface::getEepromVersionInfo()
12408{
12409 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12410}
12411
12412/*===========================================================================
12413* FUNCTION : getLdafCalib
12414*
12415* DESCRIPTION: Retrieve Laser AF calibration data
12416*
12417* PARAMETERS : None
12418*
12419* RETURN : Two uint32_t describing laser AF calibration data
12420* NULL if none is available.
12421*==========================================================================*/
12422const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12423{
12424 if (mLdafCalibExist) {
12425 return &mLdafCalib[0];
12426 } else {
12427 return NULL;
12428 }
12429}
12430
/*===========================================================================
 * FUNCTION   : dynamicUpdateMetaStreamInfo
 *
 * DESCRIPTION: This function:
 *              (1) stops all the channels
 *              (2) returns error on pending requests and buffers
 *              (3) sends metastream_info in setparams
 *              (4) starts all channels
 *              This is useful when sensor has to be restarted to apply any
 *              settings such as frame rate from a different sensor mode
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    // (1) Stream-off everything before reconfiguring the sensor mode.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // (2) Fail outstanding requests so the framework can resubmit them.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        // NOTE(review): "%x" is used for postprocess_mask; if
        // cam_feature_mask_t is 64-bit this format specifier is too narrow —
        // verify against cam_types.h.
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    // (3) Send meta stream info once again so that ISP can start.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: channels are restarted below even if the
        // sensor mode did not change.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // (4) Stream-on all channels again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
12495
12496/*===========================================================================
12497 * FUNCTION : stopAllChannels
12498 *
12499 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12500 *
12501 * PARAMETERS : None
12502 *
12503 * RETURN : NO_ERROR on success
12504 * Error codes on failure
12505 *
12506 *==========================================================================*/
12507int32_t QCamera3HardwareInterface::stopAllChannels()
12508{
12509 int32_t rc = NO_ERROR;
12510
12511 LOGD("Stopping all channels");
12512 // Stop the Streams/Channels
12513 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12514 it != mStreamInfo.end(); it++) {
12515 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12516 if (channel) {
12517 channel->stop();
12518 }
12519 (*it)->status = INVALID;
12520 }
12521
12522 if (mSupportChannel) {
12523 mSupportChannel->stop();
12524 }
12525 if (mAnalysisChannel) {
12526 mAnalysisChannel->stop();
12527 }
12528 if (mRawDumpChannel) {
12529 mRawDumpChannel->stop();
12530 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012531 if (mHdrPlusRawSrcChannel) {
12532 mHdrPlusRawSrcChannel->stop();
12533 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012534 if (mMetadataChannel) {
12535 /* If content of mStreamInfo is not 0, there is metadata stream */
12536 mMetadataChannel->stop();
12537 }
12538
12539 LOGD("All channels stopped");
12540 return rc;
12541}
12542
12543/*===========================================================================
12544 * FUNCTION : startAllChannels
12545 *
12546 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12547 *
12548 * PARAMETERS : None
12549 *
12550 * RETURN : NO_ERROR on success
12551 * Error codes on failure
12552 *
12553 *==========================================================================*/
12554int32_t QCamera3HardwareInterface::startAllChannels()
12555{
12556 int32_t rc = NO_ERROR;
12557
12558 LOGD("Start all channels ");
12559 // Start the Streams/Channels
12560 if (mMetadataChannel) {
12561 /* If content of mStreamInfo is not 0, there is metadata stream */
12562 rc = mMetadataChannel->start();
12563 if (rc < 0) {
12564 LOGE("META channel start failed");
12565 return rc;
12566 }
12567 }
12568 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12569 it != mStreamInfo.end(); it++) {
12570 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12571 if (channel) {
12572 rc = channel->start();
12573 if (rc < 0) {
12574 LOGE("channel start failed");
12575 return rc;
12576 }
12577 }
12578 }
12579 if (mAnalysisChannel) {
12580 mAnalysisChannel->start();
12581 }
12582 if (mSupportChannel) {
12583 rc = mSupportChannel->start();
12584 if (rc < 0) {
12585 LOGE("Support channel start failed");
12586 return rc;
12587 }
12588 }
12589 if (mRawDumpChannel) {
12590 rc = mRawDumpChannel->start();
12591 if (rc < 0) {
12592 LOGE("RAW dump channel start failed");
12593 return rc;
12594 }
12595 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012596 if (mHdrPlusRawSrcChannel) {
12597 rc = mHdrPlusRawSrcChannel->start();
12598 if (rc < 0) {
12599 LOGE("HDR+ RAW channel start failed");
12600 return rc;
12601 }
12602 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012603
12604 LOGD("All channels started");
12605 return rc;
12606}
12607
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 *              Two cases are handled per pending-buffer request:
 *              (a) requests older than the oldest entry on
 *                  mPendingRequestsList (their metadata was already sent):
 *                  one ERROR_BUFFER notify per buffer, then a result with
 *                  all buffers flagged CAMERA3_BUFFER_STATUS_ERROR.
 *              (b) all other requests: a single ERROR_REQUEST notify, then
 *                  an error result, and the request is erased from
 *                  mPendingRequestsList as well.
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the cut-off: buffers belonging to frames older than the
    // oldest pending request get ERROR_BUFFER, the rest ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on  mPendingRequestsList = %u",
      frameNum);

    // Walk every tracked request; each branch erases the entry itself, so
    // the loop header does no increment.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new here throws unless the build uses
            // -fno-exceptions; the NULL check is only meaningful in the
            // latter configuration — confirm against the build flags.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per buffer, while filling the error
            // result array; each processed buffer is dropped from the list.
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): input_buffer comes from the head of
            // mPendingRequestsList, not from the entry matching this
            // frame_number — presumably they correspond one-to-one here;
            // verify against the request-tracking invariants.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Drop any stragglers so the tracker is fully clean after the flush.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12755
12756bool QCamera3HardwareInterface::isOnEncoder(
12757 const cam_dimension_t max_viewfinder_size,
12758 uint32_t width, uint32_t height)
12759{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012760 return ((width > (uint32_t)max_viewfinder_size.width) ||
12761 (height > (uint32_t)max_viewfinder_size.height) ||
12762 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12763 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012764}
12765
12766/*===========================================================================
12767 * FUNCTION : setBundleInfo
12768 *
12769 * DESCRIPTION: Set bundle info for all streams that are bundle.
12770 *
12771 * PARAMETERS : None
12772 *
12773 * RETURN : NO_ERROR on success
12774 * Error codes on failure
12775 *==========================================================================*/
12776int32_t QCamera3HardwareInterface::setBundleInfo()
12777{
12778 int32_t rc = NO_ERROR;
12779
12780 if (mChannelHandle) {
12781 cam_bundle_config_t bundleInfo;
12782 memset(&bundleInfo, 0, sizeof(bundleInfo));
12783 rc = mCameraHandle->ops->get_bundle_info(
12784 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12785 if (rc != NO_ERROR) {
12786 LOGE("get_bundle_info failed");
12787 return rc;
12788 }
12789 if (mAnalysisChannel) {
12790 mAnalysisChannel->setBundleInfo(bundleInfo);
12791 }
12792 if (mSupportChannel) {
12793 mSupportChannel->setBundleInfo(bundleInfo);
12794 }
12795 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12796 it != mStreamInfo.end(); it++) {
12797 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12798 channel->setBundleInfo(bundleInfo);
12799 }
12800 if (mRawDumpChannel) {
12801 mRawDumpChannel->setBundleInfo(bundleInfo);
12802 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012803 if (mHdrPlusRawSrcChannel) {
12804 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
12805 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012806 }
12807
12808 return rc;
12809}
12810
12811/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012812 * FUNCTION : setInstantAEC
12813 *
12814 * DESCRIPTION: Set Instant AEC related params.
12815 *
12816 * PARAMETERS :
12817 * @meta: CameraMetadata reference
12818 *
12819 * RETURN : NO_ERROR on success
12820 * Error codes on failure
12821 *==========================================================================*/
12822int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12823{
12824 int32_t rc = NO_ERROR;
12825 uint8_t val = 0;
12826 char prop[PROPERTY_VALUE_MAX];
12827
12828 // First try to configure instant AEC from framework metadata
12829 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12830 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12831 }
12832
12833 // If framework did not set this value, try to read from set prop.
12834 if (val == 0) {
12835 memset(prop, 0, sizeof(prop));
12836 property_get("persist.camera.instant.aec", prop, "0");
12837 val = (uint8_t)atoi(prop);
12838 }
12839
12840 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12841 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12842 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12843 mInstantAEC = val;
12844 mInstantAECSettledFrameNumber = 0;
12845 mInstantAecFrameIdxCount = 0;
12846 LOGH("instantAEC value set %d",val);
12847 if (mInstantAEC) {
12848 memset(prop, 0, sizeof(prop));
12849 property_get("persist.camera.ae.instant.bound", prop, "10");
12850 int32_t aec_frame_skip_cnt = atoi(prop);
12851 if (aec_frame_skip_cnt >= 0) {
12852 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12853 } else {
12854 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12855 rc = BAD_VALUE;
12856 }
12857 }
12858 } else {
12859 LOGE("Bad instant aec value set %d", val);
12860 rc = BAD_VALUE;
12861 }
12862 return rc;
12863}
12864
12865/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012866 * FUNCTION : get_num_overall_buffers
12867 *
12868 * DESCRIPTION: Estimate number of pending buffers across all requests.
12869 *
12870 * PARAMETERS : None
12871 *
12872 * RETURN : Number of overall pending buffers
12873 *
12874 *==========================================================================*/
12875uint32_t PendingBuffersMap::get_num_overall_buffers()
12876{
12877 uint32_t sum_buffers = 0;
12878 for (auto &req : mPendingBuffersInRequest) {
12879 sum_buffers += req.mPendingBufferList.size();
12880 }
12881 return sum_buffers;
12882}
12883
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker. If removing the buffer
 *              empties its request's buffer list, the request entry itself
 *              is removed from the tracker as well. At most one buffer is
 *              removed per call.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // Note: erase() invalidates 'req', but both loops break
                    // out immediately below, so the stale iterator is never
                    // advanced or dereferenced.
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Stop scanning once the buffer has been removed.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12920
12921/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012922 * FUNCTION : getBufErrStatus
12923 *
12924 * DESCRIPTION: get buffer error status
12925 *
12926 * PARAMETERS : @buffer: buffer handle
12927 *
12928 * RETURN : Error status
12929 *
12930 *==========================================================================*/
12931int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12932{
12933 for (auto& req : mPendingBuffersInRequest) {
12934 for (auto& k : req.mPendingBufferList) {
12935 if (k.buffer == buffer)
12936 return k.bufStatus;
12937 }
12938 }
12939 return CAMERA3_BUFFER_STATUS_OK;
12940}
12941
12942/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012943 * FUNCTION : setPAAFSupport
12944 *
12945 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12946 * feature mask according to stream type and filter
12947 * arrangement
12948 *
12949 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12950 * @stream_type: stream type
12951 * @filter_arrangement: filter arrangement
12952 *
12953 * RETURN : None
12954 *==========================================================================*/
12955void QCamera3HardwareInterface::setPAAFSupport(
12956 cam_feature_mask_t& feature_mask,
12957 cam_stream_type_t stream_type,
12958 cam_color_filter_arrangement_t filter_arrangement)
12959{
12960 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12961 feature_mask, stream_type, filter_arrangement);
12962
12963 switch (filter_arrangement) {
12964 case CAM_FILTER_ARRANGEMENT_RGGB:
12965 case CAM_FILTER_ARRANGEMENT_GRBG:
12966 case CAM_FILTER_ARRANGEMENT_GBRG:
12967 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012968 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12969 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012970 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12971 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12972 }
12973 break;
12974 case CAM_FILTER_ARRANGEMENT_Y:
12975 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12976 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12977 }
12978 break;
12979 default:
12980 break;
12981 }
12982}
12983
12984/*===========================================================================
12985* FUNCTION : getSensorMountAngle
12986*
12987* DESCRIPTION: Retrieve sensor mount angle
12988*
12989* PARAMETERS : None
12990*
12991* RETURN : sensor mount angle in uint32_t
12992*==========================================================================*/
12993uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12994{
12995 return gCamCapability[mCameraId]->sensor_mount_angle;
12996}
12997
12998/*===========================================================================
12999* FUNCTION : getRelatedCalibrationData
13000*
13001* DESCRIPTION: Retrieve related system calibration data
13002*
13003* PARAMETERS : None
13004*
13005* RETURN : Pointer of related system calibration data
13006*==========================================================================*/
13007const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13008{
13009 return (const cam_related_system_calibration_data_t *)
13010 &(gCamCapability[mCameraId]->related_cam_calibration);
13011}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013012
13013/*===========================================================================
13014 * FUNCTION : is60HzZone
13015 *
13016 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
13017 *
13018 * PARAMETERS : None
13019 *
13020 * RETURN : True if in 60Hz zone, False otherwise
13021 *==========================================================================*/
13022bool QCamera3HardwareInterface::is60HzZone()
13023{
13024 time_t t = time(NULL);
13025 struct tm lt;
13026
13027 struct tm* r = localtime_r(&t, &lt);
13028
13029 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13030 return true;
13031 else
13032 return false;
13033}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013034
13035/*===========================================================================
13036 * FUNCTION : adjustBlackLevelForCFA
13037 *
13038 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13039 * of bayer CFA (Color Filter Array).
13040 *
13041 * PARAMETERS : @input: black level pattern in the order of RGGB
13042 * @output: black level pattern in the order of CFA
13043 * @color_arrangement: CFA color arrangement
13044 *
13045 * RETURN : None
13046 *==========================================================================*/
13047template<typename T>
13048void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13049 T input[BLACK_LEVEL_PATTERN_CNT],
13050 T output[BLACK_LEVEL_PATTERN_CNT],
13051 cam_color_filter_arrangement_t color_arrangement)
13052{
13053 switch (color_arrangement) {
13054 case CAM_FILTER_ARRANGEMENT_GRBG:
13055 output[0] = input[1];
13056 output[1] = input[0];
13057 output[2] = input[3];
13058 output[3] = input[2];
13059 break;
13060 case CAM_FILTER_ARRANGEMENT_GBRG:
13061 output[0] = input[2];
13062 output[1] = input[3];
13063 output[2] = input[0];
13064 output[3] = input[1];
13065 break;
13066 case CAM_FILTER_ARRANGEMENT_BGGR:
13067 output[0] = input[3];
13068 output[1] = input[2];
13069 output[2] = input[1];
13070 output[3] = input[0];
13071 break;
13072 case CAM_FILTER_ARRANGEMENT_RGGB:
13073 output[0] = input[0];
13074 output[1] = input[1];
13075 output[2] = input[2];
13076 output[3] = input[3];
13077 break;
13078 default:
13079 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13080 break;
13081 }
13082}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013083
// Callback from the HDR+ client when a capture result (one YUV output buffer
// plus result metadata) is ready. Forwards the metadata to the framework,
// optionally dumps the YUV output, hands the buffer back to the pic channel
// for JPEG encoding, and drops the bookkeeping entry.
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata) {
    if (result != nullptr) {
        // Exactly one output buffer, on the YUV stream, is supported.
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Send HDR+ metadata to framework.
        {
            // handlePendingResultsWithLock takes ownership of the cloned
            // metadata; mMutex protects the pending-results bookkeeping.
            pthread_mutex_lock(&mMutex);
            handlePendingResultsWithLock(result->requestId, clone_camera_metadata(&resultMetadata));
            pthread_mutex_unlock(&mMutex);
        }

        // Copy the pending-request entry out under the lock so the buffers
        // can be used below without holding it.
        // NOTE(review): find() result is dereferenced without an end()
        // check — presumably requestId is always tracked; confirm the
        // client never delivers a result for an unknown request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            pendingRequest = req->second;
        }

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            QCamera3PicChannel *picChannel =
                    (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                // Dump is best-effort; a config failure only loses the dump.
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        // Return the buffer to pic channel.
        // TODO: Use result metadata.
        mPictureChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                pendingRequest.settings);

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}
13153
// Callback from the HDR+ client when a capture fails. Currently only
// recycles the YUV buffer back to the pic channel and drops the pending
// entry; the framework is not yet informed of the failure (see TODO).
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
    // TODO: Handle HDR+ capture failures and send the failure to framework.
    Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
    // NOTE(review): find() result is used without an end() check —
    // presumably failedResult->requestId is always tracked; confirm the
    // client cannot report a failure for an unknown request.
    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);

    // Return the buffer to pic channel.
    QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
    picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

    mHdrPlusPendingRequests.erase(pendingRequest);
}
13166
Thierry Strudel3d639192016-09-09 11:52:26 -070013167}; //end namespace qcamera