/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
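// Illustrative note (not part of the original source): METADATA_MAP_SIZE yields the
// entry count of a statically sized mapping table defined in this file, e.g.
//   size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP); // number of effect-mode mappings below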

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
// If Easel is connected.
bool gEaselConnected;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum value is listed, since not all options have a mapping.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * lookup traverses from lower to higher index, so for HAL values that map to several
 * Android values the traversal logic selects the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
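/*
 * Illustrative sketch (not part of the original file): the QCameraMap tables above
 * are consumed by simple linear lookups. Mapping a HAL value back to a framework
 * value walks the table from index 0 upwards and returns the first match, which is
 * why entry order matters when one HAL value appears against several Android values.
 * Assuming each entry pairs a framework value with a HAL value (field names below are
 * hypothetical), a lookup helper would look roughly like this:
 *
 *   template <typename fwkType, typename halType, size_t N>
 *   int mapHalToFwk(const QCameraMap<fwkType, halType> (&table)[N],
 *           halType hal, fwkType *fwk) {
 *       for (size_t i = 0; i < N; i++) {          // lower index wins on duplicates
 *           if (table[i].hal_name == hal) {
 *               *fwk = table[i].fwk_name;
 *               return 0;
 *           }
 *       }
 *       return -1;                                // no advertised mapping
 *   }
 *
 * For example, CAM_AWB_D50 would resolve to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,
 * the first D50 entry in REFERENCE_ILLUMINANT_MAP above.
 */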

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    if (gHdrPlusClient != nullptr) {
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        // Disable HDR+ mode.
        disableHdrPlusModeLocked();
        // Disconnect Easel if it's connected.
        pthread_mutex_lock(&gCamLock);
        if (gEaselConnected) {
            gHdrPlusClient->disconnect();
            gEaselConnected = false;
        }
        pthread_mutex_unlock(&gCamLock);

        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that; also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlagsForEis
 *
 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlagsForEis(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);

        // Because EIS is "hard-coded" for certain use case, and current
        // implementation doesn't support shared preview and video on the same
        // stream, return failure if EIS is forced on.
        if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
            LOGE("Combined video and preview usage flag is not supported due to EIS");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

Thierry Strudel3d639192016-09-09 11:52:26 -07001352/*==============================================================================
1353 * FUNCTION : isSupportChannelNeeded
1354 *
1355 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1356 *
1357 * PARAMETERS :
1358 * @stream_list : streams to be configured
1359 * @stream_config_info : the config info for streams to be configured
1360 *
1361 * RETURN : Boolen true/false decision
1362 *
1363 *==========================================================================*/
1364bool QCamera3HardwareInterface::isSupportChannelNeeded(
1365 camera3_stream_configuration_t *streamList,
1366 cam_stream_size_info_t stream_config_info)
1367{
1368 uint32_t i;
1369 bool pprocRequested = false;
1370 /* Check for conditions where PProc pipeline does not have any streams*/
1371 for (i = 0; i < stream_config_info.num_streams; i++) {
1372 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1373 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1374 pprocRequested = true;
1375 break;
1376 }
1377 }
1378
1379 if (pprocRequested == false )
1380 return true;
1381
1382 /* Dummy stream needed if only raw or jpeg streams present */
1383 for (i = 0; i < streamList->num_streams; i++) {
1384 switch(streamList->streams[i]->format) {
1385 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1386 case HAL_PIXEL_FORMAT_RAW10:
1387 case HAL_PIXEL_FORMAT_RAW16:
1388 case HAL_PIXEL_FORMAT_BLOB:
1389 break;
1390 default:
1391 return false;
1392 }
1393 }
1394 return true;
1395}
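// --- Illustrative sketch (compiled out, not part of the HAL build) ---------
// Example of the heuristic above: a JPEG-only capture session (every stream
// is RAW or BLOB) needs the dummy support channel, while adding any processed
// stream removes that need. The stream size below is hypothetical.
#if 0
static void exampleJpegOnlySessionNeedsSupportChannel()
{
    camera3_stream_t jpegStream = {};
    jpegStream.stream_type = CAMERA3_STREAM_OUTPUT;
    jpegStream.format = HAL_PIXEL_FORMAT_BLOB;
    jpegStream.width = 4032;
    jpegStream.height = 3024;

    camera3_stream_t *streams[] = { &jpegStream };
    camera3_stream_configuration_t config = {};
    config.num_streams = 1;
    config.streams = streams;

    // With a cam_stream_size_info_t whose postprocess masks request features,
    // isSupportChannelNeeded() returns true for this stream list; adding an
    // IMPLEMENTATION_DEFINED preview stream would make it return false.
}
#endif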
1396
1397/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001398 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001399 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001400 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001401 *
1402 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001403 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001404 *
1405 * RETURN : int32_t type of status
1406 * NO_ERROR -- success
1407 * non-zero failure code
1408 *
1409 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001410int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001411{
1412 int32_t rc = NO_ERROR;
1413
1414 cam_dimension_t max_dim = {0, 0};
1415 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1416 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1417 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1418 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1419 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1420 }
1421
1422 clear_metadata_buffer(mParameters);
1423
1424 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1425 max_dim);
1426 if (rc != NO_ERROR) {
1427 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1428 return rc;
1429 }
1430
1431 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1432 if (rc != NO_ERROR) {
1433 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1434 return rc;
1435 }
1436
1437 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001439
1440 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1441 mParameters);
1442 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 return rc;
1445 }
1446
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001447 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001448 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1449 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1450 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1451 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1452 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001453
1454 return rc;
1455}
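// --- Illustrative sketch (compiled out, not part of the HAL build) ---------
// The CAM_INTF_PARM_MAX_DIMENSION value sent above is a per-axis bounding box
// of the configured stream sizes, not the size of any single stream. A small
// worked example with hypothetical sizes:
#if 0
static cam_dimension_t exampleMaxDimension()
{
    const cam_dimension_t sizes[] = { {1920, 1080}, {1440, 1440} };
    cam_dimension_t max_dim = {0, 0};
    for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
        if (sizes[i].width > max_dim.width)
            max_dim.width = sizes[i].width;
        if (sizes[i].height > max_dim.height)
            max_dim.height = sizes[i].height;
    }
    // max_dim ends up as {1920, 1440}: wider than the square stream and
    // taller than the 16:9 stream.
    return max_dim;
}
#endif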
1456
1457/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 * FUNCTION : addToPPFeatureMask
1459 *
1460 * DESCRIPTION: add additional features to pp feature mask based on
1461 * stream type and usecase
1462 *
1463 * PARAMETERS :
1464 * @stream_format : stream type for feature mask
1465 * @stream_idx : stream idx within postprocess_mask list to change
1466 *
1467 * RETURN : None
1468 *
1469 *==========================================================================*/
1470void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1471 uint32_t stream_idx)
1472{
1473 char feature_mask_value[PROPERTY_VALUE_MAX];
1474 cam_feature_mask_t feature_mask;
1475 int args_converted;
1476 int property_len;
1477
1478 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001479#ifdef _LE_CAMERA_
1480 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1481 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1482 property_len = property_get("persist.camera.hal3.feature",
1483 feature_mask_value, swtnr_feature_mask_value);
1484#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001485 property_len = property_get("persist.camera.hal3.feature",
1486 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001487#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1489 (feature_mask_value[1] == 'x')) {
1490 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1491 } else {
1492 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1493 }
1494 if (1 != args_converted) {
1495 feature_mask = 0;
1496 LOGE("Wrong feature mask %s", feature_mask_value);
1497 return;
1498 }
1499
1500 switch (stream_format) {
1501 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1502 /* Add LLVD to pp feature mask only if video hint is enabled */
1503 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1504 mStreamConfigInfo.postprocess_mask[stream_idx]
1505 |= CAM_QTI_FEATURE_SW_TNR;
1506 LOGH("Added SW TNR to pp feature mask");
1507 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1508 mStreamConfigInfo.postprocess_mask[stream_idx]
1509 |= CAM_QCOM_FEATURE_LLVD;
1510 LOGH("Added LLVD SeeMore to pp feature mask");
1511 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001512 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1513 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1514 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1515 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001516 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1517 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1518 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1519 CAM_QTI_FEATURE_BINNING_CORRECTION;
1520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001521 break;
1522 }
1523 default:
1524 break;
1525 }
1526 LOGD("PP feature mask %llx",
1527 mStreamConfigInfo.postprocess_mask[stream_idx]);
1528}
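// --- Illustrative sketch (compiled out, not part of the HAL build) ---------
// The persist.camera.hal3.feature property parsed above accepts either a hex
// string ("0x...") or a decimal string. A standalone sketch of that parsing,
// mirroring the sscanf usage above; the example property values are
// hypothetical.
#if 0
static cam_feature_mask_t exampleParseFeatureMaskProperty(const char *value)
{
    cam_feature_mask_t mask = 0;
    int converted;
    if ((strlen(value) > 2) && (value[0] == '0') && (value[1] == 'x')) {
        converted = sscanf(value, "0x%llx", &mask);  // e.g. "0x2000"
    } else {
        converted = sscanf(value, "%lld", &mask);    // e.g. "8192"
    }
    return (converted == 1) ? mask : 0;              // 0 on malformed input
}
#endif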
1529
1530/*==============================================================================
1531 * FUNCTION : updateFpsInPreviewBuffer
1532 *
1533 * DESCRIPTION: update FPS information in preview buffer.
1534 *
1535 * PARAMETERS :
1536 * @metadata : pointer to metadata buffer
1537 * @frame_number: frame_number to look for in pending buffer list
1538 *
1539 * RETURN : None
1540 *
1541 *==========================================================================*/
1542void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1543 uint32_t frame_number)
1544{
1545 // Mark all pending buffers for this particular request
1546 // with corresponding framerate information
1547 for (List<PendingBuffersInRequest>::iterator req =
1548 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1549 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1550 for(List<PendingBufferInfo>::iterator j =
1551 req->mPendingBufferList.begin();
1552 j != req->mPendingBufferList.end(); j++) {
1553 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1554 if ((req->frame_number == frame_number) &&
1555 (channel->getStreamTypeMask() &
1556 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1557 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1558 CAM_INTF_PARM_FPS_RANGE, metadata) {
1559 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1560 struct private_handle_t *priv_handle =
1561 (struct private_handle_t *)(*(j->buffer));
1562 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1563 }
1564 }
1565 }
1566 }
1567}
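// --- Illustrative sketch (compiled out, not part of the HAL build) ---------
// The loop above tags each matching preview buffer with the current max FPS
// through the display metadata helper declared in qdMetaData.h. Reduced to a
// single buffer (handle and fps value hypothetical), the update looks like:
#if 0
static void exampleTagPreviewBufferFps(buffer_handle_t *buffer, float maxFps)
{
    struct private_handle_t *priv_handle =
            (struct private_handle_t *)(*buffer);
    typeof (MetaData_t::refreshrate) cameraFps = maxFps;
    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
}
#endif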
1568
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001569/*==============================================================================
1570 * FUNCTION : updateTimeStampInPendingBuffers
1571 *
1572 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1573 * of a frame number
1574 *
1575 * PARAMETERS :
1576 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1577 * @timestamp : timestamp to be set
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1583 uint32_t frameNumber, nsecs_t timestamp)
1584{
1585 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1586 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1587 if (req->frame_number != frameNumber)
1588 continue;
1589
1590 for (auto k = req->mPendingBufferList.begin();
1591 k != req->mPendingBufferList.end(); k++ ) {
1592 struct private_handle_t *priv_handle =
1593 (struct private_handle_t *) (*(k->buffer));
1594 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1595 }
1596 }
1597 return;
1598}
1599
Thierry Strudel3d639192016-09-09 11:52:26 -07001600/*===========================================================================
1601 * FUNCTION : configureStreams
1602 *
1603 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1604 * and output streams.
1605 *
1606 * PARAMETERS :
1607 * @stream_list : streams to be configured
1608 *
1609 * RETURN : int type of status
1610 *
1611 *==========================================================================*/
1612int QCamera3HardwareInterface::configureStreams(
1613 camera3_stream_configuration_t *streamList)
1614{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001615 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 int rc = 0;
1617
1618 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001619 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001621 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001622
1623 return rc;
1624}
1625
1626/*===========================================================================
1627 * FUNCTION : configureStreamsPerfLocked
1628 *
1629 * DESCRIPTION: configureStreams while perfLock is held.
1630 *
1631 * PARAMETERS :
1632 * @stream_list : streams to be configured
1633 *
1634 * RETURN : int32_t type of status
1635 * NO_ERROR -- success
1636 * non-zero failure code
1637 *==========================================================================*/
1638int QCamera3HardwareInterface::configureStreamsPerfLocked(
1639 camera3_stream_configuration_t *streamList)
1640{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001641 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001642 int rc = 0;
1643
1644 // Sanity check stream_list
1645 if (streamList == NULL) {
1646 LOGE("NULL stream configuration");
1647 return BAD_VALUE;
1648 }
1649 if (streamList->streams == NULL) {
1650 LOGE("NULL stream list");
1651 return BAD_VALUE;
1652 }
1653
1654 if (streamList->num_streams < 1) {
1655 LOGE("Bad number of streams requested: %d",
1656 streamList->num_streams);
1657 return BAD_VALUE;
1658 }
1659
1660 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1661 LOGE("Maximum number of streams %d exceeded: %d",
1662 MAX_NUM_STREAMS, streamList->num_streams);
1663 return BAD_VALUE;
1664 }
1665
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001666 rc = validateUsageFlags(streamList);
1667 if (rc != NO_ERROR) {
1668 return rc;
1669 }
1670
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 mOpMode = streamList->operation_mode;
1672 LOGD("mOpMode: %d", mOpMode);
1673
1674 /* first invalidate all the streams in mStreamInfo
1675 * if they appear again, they will be validated */
1676 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1677 it != mStreamInfo.end(); it++) {
1678 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1679 if (channel) {
1680 channel->stop();
1681 }
1682 (*it)->status = INVALID;
1683 }
1684
1685 if (mRawDumpChannel) {
1686 mRawDumpChannel->stop();
1687 delete mRawDumpChannel;
1688 mRawDumpChannel = NULL;
1689 }
1690
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001691 if (mHdrPlusRawSrcChannel) {
1692 mHdrPlusRawSrcChannel->stop();
1693 delete mHdrPlusRawSrcChannel;
1694 mHdrPlusRawSrcChannel = NULL;
1695 }
1696
Thierry Strudel3d639192016-09-09 11:52:26 -07001697 if (mSupportChannel)
1698 mSupportChannel->stop();
1699
1700 if (mAnalysisChannel) {
1701 mAnalysisChannel->stop();
1702 }
1703 if (mMetadataChannel) {
1704 /* If mStreamInfo is not empty, there is a metadata stream */
1705 mMetadataChannel->stop();
1706 }
1707 if (mChannelHandle) {
1708 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1709 mChannelHandle);
1710 LOGD("stopping channel %d", mChannelHandle);
1711 }
1712
1713 pthread_mutex_lock(&mMutex);
1714
1715 // Check state
1716 switch (mState) {
1717 case INITIALIZED:
1718 case CONFIGURED:
1719 case STARTED:
1720 /* valid state */
1721 break;
1722 default:
1723 LOGE("Invalid state %d", mState);
1724 pthread_mutex_unlock(&mMutex);
1725 return -ENODEV;
1726 }
1727
1728 /* Check whether we have video stream */
1729 m_bIs4KVideo = false;
1730 m_bIsVideo = false;
1731 m_bEisSupportedSize = false;
1732 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001733 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001734 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001735 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001736 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 uint32_t videoWidth = 0U;
1738 uint32_t videoHeight = 0U;
1739 size_t rawStreamCnt = 0;
1740 size_t stallStreamCnt = 0;
1741 size_t processedStreamCnt = 0;
1742 // Number of streams on ISP encoder path
1743 size_t numStreamsOnEncoder = 0;
1744 size_t numYuv888OnEncoder = 0;
1745 bool bYuv888OverrideJpeg = false;
1746 cam_dimension_t largeYuv888Size = {0, 0};
1747 cam_dimension_t maxViewfinderSize = {0, 0};
1748 bool bJpegExceeds4K = false;
1749 bool bJpegOnEncoder = false;
1750 bool bUseCommonFeatureMask = false;
1751 cam_feature_mask_t commonFeatureMask = 0;
1752 bool bSmallJpegSize = false;
1753 uint32_t width_ratio;
1754 uint32_t height_ratio;
1755 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1756 camera3_stream_t *inputStream = NULL;
1757 bool isJpeg = false;
1758 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001759 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001760 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001761
1762 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1763
1764 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001765 uint8_t eis_prop_set;
1766 uint32_t maxEisWidth = 0;
1767 uint32_t maxEisHeight = 0;
1768
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001769 // Initialize all instant AEC related variables
1770 mInstantAEC = false;
1771 mResetInstantAEC = false;
1772 mInstantAECSettledFrameNumber = 0;
1773 mAecSkipDisplayFrameBound = 0;
1774 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001775 mCurrFeatureState = 0;
1776 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001777
Thierry Strudel3d639192016-09-09 11:52:26 -07001778 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1779
1780 size_t count = IS_TYPE_MAX;
1781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1782 for (size_t i = 0; i < count; i++) {
1783 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001784 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1785 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001786 break;
1787 }
1788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001789
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001790 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001791 maxEisWidth = MAX_EIS_WIDTH;
1792 maxEisHeight = MAX_EIS_HEIGHT;
1793 }
1794
1795 /* EIS setprop control */
1796 char eis_prop[PROPERTY_VALUE_MAX];
1797 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001798 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001799 eis_prop_set = (uint8_t)atoi(eis_prop);
1800
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001801 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001802 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1803
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001804 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1805 m_bEisEnable, eis_prop_set, m_bEisSupported);
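#if 0
        // Illustrative sketch only (compiled out): with hypothetical inputs
        // eis_prop_set = 1 and m_bEisSupported = true, EIS stays enabled for
        // a normal-mode session and is dropped for constrained high speed.
        {
            uint32_t exampleOpMode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
            bool exampleEnable = (1 /* setprop */) && true /* supported */ &&
                    (exampleOpMode !=
                    CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
            (void)exampleEnable;  // true here; HFR mode would make it false
        }
#endif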
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001806
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 /* stream configurations */
1808 for (size_t i = 0; i < streamList->num_streams; i++) {
1809 camera3_stream_t *newStream = streamList->streams[i];
1810 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1811 "height = %d, rotation = %d, usage = 0x%x",
1812 i, newStream->stream_type, newStream->format,
1813 newStream->width, newStream->height, newStream->rotation,
1814 newStream->usage);
1815 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1816 newStream->stream_type == CAMERA3_STREAM_INPUT){
1817 isZsl = true;
1818 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001819 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1820 IS_USAGE_PREVIEW(newStream->usage)) {
1821 isPreview = true;
1822 }
1823
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1825 inputStream = newStream;
1826 }
1827
Emilian Peev7650c122017-01-19 08:24:33 -08001828 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1829 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 isJpeg = true;
1831 jpegSize.width = newStream->width;
1832 jpegSize.height = newStream->height;
1833 if (newStream->width > VIDEO_4K_WIDTH ||
1834 newStream->height > VIDEO_4K_HEIGHT)
1835 bJpegExceeds4K = true;
1836 }
1837
1838 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1839 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1840 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001841 // In HAL3 we can have multiple different video streams.
1842 // The videoWidth and videoHeight variables are used below as
1843 // the dimensions of the biggest of them.
1844 if (videoWidth < newStream->width ||
1845 videoHeight < newStream->height) {
1846 videoWidth = newStream->width;
1847 videoHeight = newStream->height;
1848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1850 (VIDEO_4K_HEIGHT <= newStream->height)) {
1851 m_bIs4KVideo = true;
1852 }
1853 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1854 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001855
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 }
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1859 switch (newStream->format) {
1860 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001861 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1862 depthPresent = true;
1863 break;
1864 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 stallStreamCnt++;
1866 if (isOnEncoder(maxViewfinderSize, newStream->width,
1867 newStream->height)) {
1868 numStreamsOnEncoder++;
1869 bJpegOnEncoder = true;
1870 }
1871 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1872 newStream->width);
1873 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1874 newStream->height);
1875 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1876 "FATAL: max_downscale_factor cannot be zero and so assert");
1877 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1878 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1879 LOGH("Setting small jpeg size flag to true");
1880 bSmallJpegSize = true;
1881 }
1882 break;
1883 case HAL_PIXEL_FORMAT_RAW10:
1884 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1885 case HAL_PIXEL_FORMAT_RAW16:
1886 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1888 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1889 pdStatCount++;
1890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 break;
1892 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1893 processedStreamCnt++;
1894 if (isOnEncoder(maxViewfinderSize, newStream->width,
1895 newStream->height)) {
1896 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1897 !IS_USAGE_ZSL(newStream->usage)) {
1898 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900 numStreamsOnEncoder++;
1901 }
1902 break;
1903 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1904 processedStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 // If Yuv888 size is not greater than 4K, set feature mask
1908 // to SUPERSET so that it supports concurrent requests on
1909 // YUV and JPEG.
1910 if (newStream->width <= VIDEO_4K_WIDTH &&
1911 newStream->height <= VIDEO_4K_HEIGHT) {
1912 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 }
1914 numStreamsOnEncoder++;
1915 numYuv888OnEncoder++;
1916 largeYuv888Size.width = newStream->width;
1917 largeYuv888Size.height = newStream->height;
1918 }
1919 break;
1920 default:
1921 processedStreamCnt++;
1922 if (isOnEncoder(maxViewfinderSize, newStream->width,
1923 newStream->height)) {
1924 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1925 numStreamsOnEncoder++;
1926 }
1927 break;
1928 }
1929
1930 }
1931 }
1932
1933 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1934 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1935 !m_bIsVideo) {
1936 m_bEisEnable = false;
1937 }
1938
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001939 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1940 pthread_mutex_unlock(&mMutex);
1941 return -EINVAL;
1942 }
1943
Thierry Strudel54dc9782017-02-15 12:12:10 -08001944 uint8_t forceEnableTnr = 0;
1945 char tnr_prop[PROPERTY_VALUE_MAX];
1946 memset(tnr_prop, 0, sizeof(tnr_prop));
1947 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1948 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 /* Logic to enable/disable TNR based on specific config size/etc.*/
1951 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1952 ((videoWidth == 1920 && videoHeight == 1080) ||
1953 (videoWidth == 1280 && videoHeight == 720)) &&
1954 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1955 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001956 else if (forceEnableTnr)
1957 m_bTnrEnabled = true;
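#if 0
        // Illustrative sketch only (compiled out): the size gate above means
        // a hypothetical 1920x1080 video stream enables TNR (outside HFR),
        // while a 3840x2160 stream does not unless
        // debug.camera.tnr.forceenable is set.
        {
            uint32_t exampleW = 3840, exampleH = 2160;
            bool exampleSizeOk = (exampleW == 1920 && exampleH == 1080) ||
                    (exampleW == 1280 && exampleH == 720);
            (void)exampleSizeOk;  // false for 4K, so TNR stays off here
        }
#endif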
Thierry Strudel3d639192016-09-09 11:52:26 -07001958
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001959 char videoHdrProp[PROPERTY_VALUE_MAX];
1960 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1961 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1962 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1963
1964 if (hdr_mode_prop == 1 && m_bIsVideo &&
1965 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1966 m_bVideoHdrEnabled = true;
1967 else
1968 m_bVideoHdrEnabled = false;
1969
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 /* Check if num_streams is sane */
1972 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1973 rawStreamCnt > MAX_RAW_STREAMS ||
1974 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1975 LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
1976 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1977 pthread_mutex_unlock(&mMutex);
1978 return -EINVAL;
1979 }
1980 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001981 if (isZsl && m_bIs4KVideo) {
1982 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 pthread_mutex_unlock(&mMutex);
1984 return -EINVAL;
1985 }
1986 /* Check if stream sizes are sane */
1987 if (numStreamsOnEncoder > 2) {
1988 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1989 pthread_mutex_unlock(&mMutex);
1990 return -EINVAL;
1991 } else if (1 < numStreamsOnEncoder){
1992 bUseCommonFeatureMask = true;
1993 LOGH("Multiple streams above max viewfinder size, common mask needed");
1994 }
1995
1996 /* Check if BLOB size is greater than 4k in 4k recording case */
1997 if (m_bIs4KVideo && bJpegExceeds4K) {
1998 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Emilian Peev7650c122017-01-19 08:24:33 -08002003 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2004 depthPresent) {
2005 LOGE("HAL doesn't support depth streams in HFR mode!");
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2011 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2012 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2013 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2014 // configurations:
2015 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2016 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2017 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2018 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2019 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2020 __func__);
2021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024
2025 // If a jpeg stream is available, and a YUV 888 stream is on the encoder path, and
2026 // the YUV stream's size is strictly greater than the JPEG size in both dimensions,
2027 // set the common postprocess mask to NONE, so that we can take advantage of postproc bypass.
2028 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2029 jpegSize.width, jpegSize.height) &&
2030 largeYuv888Size.width > jpegSize.width &&
2031 largeYuv888Size.height > jpegSize.height) {
2032 bYuv888OverrideJpeg = true;
2033 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2034 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
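#if 0
        // Illustrative sketch only (compiled out): the override above needs
        // the YUV_420_888 stream to be strictly larger than the JPEG stream
        // in both dimensions. Hypothetical sizes:
        {
            cam_dimension_t exampleYuv  = {4032, 3024};
            cam_dimension_t exampleJpeg = {1920, 1080};
            bool exampleOverride = (exampleYuv.width > exampleJpeg.width) &&
                    (exampleYuv.height > exampleJpeg.height);
            (void)exampleOverride;  // true, so JPEG postproc can be bypassed
        }
#endif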
2036
2037 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2038 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2039 commonFeatureMask);
2040 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2041 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2042
2043 rc = validateStreamDimensions(streamList);
2044 if (rc == NO_ERROR) {
2045 rc = validateStreamRotations(streamList);
2046 }
2047 if (rc != NO_ERROR) {
2048 LOGE("Invalid stream configuration requested!");
2049 pthread_mutex_unlock(&mMutex);
2050 return rc;
2051 }
2052
Emilian Peev0f3c3162017-03-15 12:57:46 +00002053 if (1 < pdStatCount) {
2054 LOGE("HAL doesn't support multiple PD streams");
2055 pthread_mutex_unlock(&mMutex);
2056 return -EINVAL;
2057 }
2058
2059 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2060 (1 == pdStatCount)) {
2061 LOGE("HAL doesn't support PD streams in HFR mode!");
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
Thierry Strudel3d639192016-09-09 11:52:26 -07002066 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2067 for (size_t i = 0; i < streamList->num_streams; i++) {
2068 camera3_stream_t *newStream = streamList->streams[i];
2069 LOGH("newStream type = %d, stream format = %d "
2070 "stream size : %d x %d, stream rotation = %d",
2071 newStream->stream_type, newStream->format,
2072 newStream->width, newStream->height, newStream->rotation);
2073 // if the stream is in mStreamInfo, validate it
2074 bool stream_exists = false;
2075 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2076 it != mStreamInfo.end(); it++) {
2077 if ((*it)->stream == newStream) {
2078 QCamera3ProcessingChannel *channel =
2079 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2080 stream_exists = true;
2081 if (channel)
2082 delete channel;
2083 (*it)->status = VALID;
2084 (*it)->stream->priv = NULL;
2085 (*it)->channel = NULL;
2086 }
2087 }
2088 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2089 //new stream
2090 stream_info_t* stream_info;
2091 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2092 if (!stream_info) {
2093 LOGE("Could not allocate stream info");
2094 rc = -ENOMEM;
2095 pthread_mutex_unlock(&mMutex);
2096 return rc;
2097 }
2098 stream_info->stream = newStream;
2099 stream_info->status = VALID;
2100 stream_info->channel = NULL;
2101 mStreamInfo.push_back(stream_info);
2102 }
2103 /* Covers Opaque ZSL and API1 F/W ZSL */
2104 if (IS_USAGE_ZSL(newStream->usage)
2105 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2106 if (zslStream != NULL) {
2107 LOGE("Multiple input/reprocess streams requested!");
2108 pthread_mutex_unlock(&mMutex);
2109 return BAD_VALUE;
2110 }
2111 zslStream = newStream;
2112 }
2113 /* Covers YUV reprocess */
2114 if (inputStream != NULL) {
2115 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2116 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2117 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2118 && inputStream->width == newStream->width
2119 && inputStream->height == newStream->height) {
2120 if (zslStream != NULL) {
2121 /* This scenario indicates multiple YUV streams with same size
2122 * as input stream have been requested, since zsl stream handle
2123 * is solely used for the purpose of overriding the size of streams
2124 * which share h/w streams we will just make a guess here as to
2125 * which of the stream is a ZSL stream, this will be refactored
2126 * once we make generic logic for streams sharing encoder output
2127 */
2128 LOGH("Warning, Multiple input/reprocess streams requested!");
2129 }
2130 zslStream = newStream;
2131 }
2132 }
2133 }
2134
2135 /* If a zsl stream is set, we know that we have configured at least one input or
2136 bidirectional stream */
2137 if (NULL != zslStream) {
2138 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2139 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2140 mInputStreamInfo.format = zslStream->format;
2141 mInputStreamInfo.usage = zslStream->usage;
2142 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2143 mInputStreamInfo.dim.width,
2144 mInputStreamInfo.dim.height,
2145 mInputStreamInfo.format, mInputStreamInfo.usage);
2146 }
2147
2148 cleanAndSortStreamInfo();
2149 if (mMetadataChannel) {
2150 delete mMetadataChannel;
2151 mMetadataChannel = NULL;
2152 }
2153 if (mSupportChannel) {
2154 delete mSupportChannel;
2155 mSupportChannel = NULL;
2156 }
2157
2158 if (mAnalysisChannel) {
2159 delete mAnalysisChannel;
2160 mAnalysisChannel = NULL;
2161 }
2162
2163 if (mDummyBatchChannel) {
2164 delete mDummyBatchChannel;
2165 mDummyBatchChannel = NULL;
2166 }
2167
Emilian Peev7650c122017-01-19 08:24:33 -08002168 if (mDepthChannel) {
2169 mDepthChannel = NULL;
2170 }
2171
Thierry Strudel2896d122017-02-23 19:18:03 -08002172 char is_type_value[PROPERTY_VALUE_MAX];
2173 property_get("persist.camera.is_type", is_type_value, "4");
2174 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2175
Thierry Strudel3d639192016-09-09 11:52:26 -07002176 //Create metadata channel and initialize it
2177 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2178 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2179 gCamCapability[mCameraId]->color_arrangement);
2180 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2181 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002182 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 if (mMetadataChannel == NULL) {
2184 LOGE("failed to allocate metadata channel");
2185 rc = -ENOMEM;
2186 pthread_mutex_unlock(&mMutex);
2187 return rc;
2188 }
2189 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2190 if (rc < 0) {
2191 LOGE("metadata channel initialization failed");
2192 delete mMetadataChannel;
2193 mMetadataChannel = NULL;
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Thierry Strudel2896d122017-02-23 19:18:03 -08002198 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002199 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002200 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002201 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2202 /* Allocate channel objects for the requested streams */
2203 for (size_t i = 0; i < streamList->num_streams; i++) {
2204 camera3_stream_t *newStream = streamList->streams[i];
2205 uint32_t stream_usage = newStream->usage;
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2207 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2208 struct camera_info *p_info = NULL;
2209 pthread_mutex_lock(&gCamLock);
2210 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2211 pthread_mutex_unlock(&gCamLock);
2212 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2213 || IS_USAGE_ZSL(newStream->usage)) &&
2214 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002215 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002216 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002217 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2218 if (bUseCommonFeatureMask)
2219 zsl_ppmask = commonFeatureMask;
2220 else
2221 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002222 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002223 if (numStreamsOnEncoder > 0)
2224 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225 else
2226 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002228 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002230 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002231 LOGH("Input stream configured, reprocess config");
2232 } else {
2233 //for non zsl streams find out the format
2234 switch (newStream->format) {
2235 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2236 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002237 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002238 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2239 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2240 /* add additional features to pp feature mask */
2241 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2242 mStreamConfigInfo.num_streams);
2243
2244 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2246 CAM_STREAM_TYPE_VIDEO;
2247 if (m_bTnrEnabled && m_bTnrVideo) {
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2249 CAM_QCOM_FEATURE_CPP_TNR;
2250 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2251 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2252 ~CAM_QCOM_FEATURE_CDS;
2253 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002254 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2255 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2256 CAM_QTI_FEATURE_PPEISCORE;
2257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 } else {
2259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2260 CAM_STREAM_TYPE_PREVIEW;
2261 if (m_bTnrEnabled && m_bTnrPreview) {
2262 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2263 CAM_QCOM_FEATURE_CPP_TNR;
2264 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2265 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2266 ~CAM_QCOM_FEATURE_CDS;
2267 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002268 if(!m_bSwTnrPreview) {
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2270 ~CAM_QTI_FEATURE_SW_TNR;
2271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 padding_info.width_padding = mSurfaceStridePadding;
2273 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002274 previewSize.width = (int32_t)newStream->width;
2275 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 }
2277 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2278 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2279 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2280 newStream->height;
2281 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2282 newStream->width;
2283 }
2284 }
2285 break;
2286 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002287 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2289 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2290 if (bUseCommonFeatureMask)
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2292 commonFeatureMask;
2293 else
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2295 CAM_QCOM_FEATURE_NONE;
2296 } else {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2298 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2299 }
2300 break;
2301 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002302 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2304 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2305 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2307 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 /* Remove rotation if it is not supported
2309 for 4K LiveVideo snapshot case (online processing) */
2310 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2311 CAM_QCOM_FEATURE_ROTATION)) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2313 &= ~CAM_QCOM_FEATURE_ROTATION;
2314 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 } else {
2316 if (bUseCommonFeatureMask &&
2317 isOnEncoder(maxViewfinderSize, newStream->width,
2318 newStream->height)) {
2319 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2320 } else {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2322 }
2323 }
2324 if (isZsl) {
2325 if (zslStream) {
2326 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2327 (int32_t)zslStream->width;
2328 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2329 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 } else {
2333 LOGE("Error, No ZSL stream identified");
2334 pthread_mutex_unlock(&mMutex);
2335 return -EINVAL;
2336 }
2337 } else if (m_bIs4KVideo) {
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2339 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2340 } else if (bYuv888OverrideJpeg) {
2341 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2342 (int32_t)largeYuv888Size.width;
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2344 (int32_t)largeYuv888Size.height;
2345 }
2346 break;
2347 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2348 case HAL_PIXEL_FORMAT_RAW16:
2349 case HAL_PIXEL_FORMAT_RAW10:
2350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2351 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2352 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002353 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2354 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2355 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2356 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2357 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2358 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2359 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2360 gCamCapability[mCameraId]->dt[mPDIndex];
2361 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2362 gCamCapability[mCameraId]->vc[mPDIndex];
2363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 break;
2365 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2369 break;
2370 }
2371 }
2372
2373 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2374 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2375 gCamCapability[mCameraId]->color_arrangement);
2376
2377 if (newStream->priv == NULL) {
2378 //New stream, construct channel
2379 switch (newStream->stream_type) {
2380 case CAMERA3_STREAM_INPUT:
2381 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2382 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2383 break;
2384 case CAMERA3_STREAM_BIDIRECTIONAL:
2385 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2386 GRALLOC_USAGE_HW_CAMERA_WRITE;
2387 break;
2388 case CAMERA3_STREAM_OUTPUT:
2389 /* For video encoding stream, set read/write rarely
2390 * flags so that the buffers may be allocated un-cached */
2391 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2392 newStream->usage |=
2393 (GRALLOC_USAGE_SW_READ_RARELY |
2394 GRALLOC_USAGE_SW_WRITE_RARELY |
2395 GRALLOC_USAGE_HW_CAMERA_WRITE);
2396 else if (IS_USAGE_ZSL(newStream->usage))
2397 {
2398 LOGD("ZSL usage flag skipping");
2399 }
2400 else if (newStream == zslStream
2401 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2402 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2403 } else
2404 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2405 break;
2406 default:
2407 LOGE("Invalid stream_type %d", newStream->stream_type);
2408 break;
2409 }
2410
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002411 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002412 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2413 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2414 QCamera3ProcessingChannel *channel = NULL;
2415 switch (newStream->format) {
2416 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2417 if ((newStream->usage &
2418 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2419 (streamList->operation_mode ==
2420 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2421 ) {
2422 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2423 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002424 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002425 this,
2426 newStream,
2427 (cam_stream_type_t)
2428 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2430 mMetadataChannel,
2431 0); //heap buffers are not required for HFR video channel
2432 if (channel == NULL) {
2433 LOGE("allocation of channel failed");
2434 pthread_mutex_unlock(&mMutex);
2435 return -ENOMEM;
2436 }
2437 //channel->getNumBuffers() will return 0 here so use
2438 //MAX_INFLIGHT_HFR_REQUESTS
2439 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2440 newStream->priv = channel;
2441 LOGI("num video buffers in HFR mode: %d",
2442 MAX_INFLIGHT_HFR_REQUESTS);
2443 } else {
2444 /* Copy stream contents in HFR preview only case to create
2445 * dummy batch channel so that sensor streaming is in
2446 * HFR mode */
2447 if (!m_bIsVideo && (streamList->operation_mode ==
2448 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2449 mDummyBatchStream = *newStream;
2450 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002451 int bufferCount = MAX_INFLIGHT_REQUESTS;
2452 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2453 CAM_STREAM_TYPE_VIDEO) {
2454 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2455 bufferCount = MAX_VIDEO_BUFFERS;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2458 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002459 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002460 this,
2461 newStream,
2462 (cam_stream_type_t)
2463 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2465 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002467 if (channel == NULL) {
2468 LOGE("allocation of channel failed");
2469 pthread_mutex_unlock(&mMutex);
2470 return -ENOMEM;
2471 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 /* disable UBWC for preview, though supported,
2473 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002474 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002475 (previewSize.width == (int32_t)videoWidth)&&
2476 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002477 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002479 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 newStream->max_buffers = channel->getNumBuffers();
2481 newStream->priv = channel;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2485 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2486 mChannelHandle,
2487 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel);
2495 if (channel == NULL) {
2496 LOGE("allocation of YUV channel failed");
2497 pthread_mutex_unlock(&mMutex);
2498 return -ENOMEM;
2499 }
2500 newStream->max_buffers = channel->getNumBuffers();
2501 newStream->priv = channel;
2502 break;
2503 }
2504 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2505 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002506 case HAL_PIXEL_FORMAT_RAW10: {
2507 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2508 (HAL_DATASPACE_DEPTH != newStream->data_space))
2509 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mRawChannel = new QCamera3RawChannel(
2511 mCameraHandle->camera_handle, mChannelHandle,
2512 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002513 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002514 this, newStream,
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002516 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002517 if (mRawChannel == NULL) {
2518 LOGE("allocation of raw channel failed");
2519 pthread_mutex_unlock(&mMutex);
2520 return -ENOMEM;
2521 }
2522 newStream->max_buffers = mRawChannel->getNumBuffers();
2523 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2524 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002525 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002527 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2528 mDepthChannel = new QCamera3DepthChannel(
2529 mCameraHandle->camera_handle, mChannelHandle,
2530 mCameraHandle->ops, NULL, NULL, &padding_info,
2531 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2532 mMetadataChannel);
2533 if (NULL == mDepthChannel) {
2534 LOGE("Allocation of depth channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return NO_MEMORY;
2537 }
2538 newStream->priv = mDepthChannel;
2539 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2540 } else {
2541 // Max live snapshot inflight buffer is 1. This is to mitigate
2542 // frame drop issues for video snapshot. The more buffers being
2543 // allocated, the more frame drops there are.
2544 mPictureChannel = new QCamera3PicChannel(
2545 mCameraHandle->camera_handle, mChannelHandle,
2546 mCameraHandle->ops, captureResultCb,
2547 setBufferErrorStatus, &padding_info, this, newStream,
2548 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2549 m_bIs4KVideo, isZsl, mMetadataChannel,
2550 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2551 if (mPictureChannel == NULL) {
2552 LOGE("allocation of channel failed");
2553 pthread_mutex_unlock(&mMutex);
2554 return -ENOMEM;
2555 }
2556 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2557 newStream->max_buffers = mPictureChannel->getNumBuffers();
2558 mPictureChannel->overrideYuvSize(
2559 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2560 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002562 break;
2563
2564 default:
2565 LOGE("not a supported format 0x%x", newStream->format);
2566 break;
2567 }
2568 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2569 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2570 } else {
2571 LOGE("Error, Unknown stream type");
2572 pthread_mutex_unlock(&mMutex);
2573 return -EINVAL;
2574 }
2575
2576 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002577 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2578 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002579 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002580 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2582 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2583 }
2584 }
2585
2586 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2587 it != mStreamInfo.end(); it++) {
2588 if ((*it)->stream == newStream) {
2589 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2590 break;
2591 }
2592 }
2593 } else {
2594 // Channel already exists for this stream
2595 // Do nothing for now
2596 }
2597 padding_info = gCamCapability[mCameraId]->padding_info;
2598
Emilian Peev7650c122017-01-19 08:24:33 -08002599 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 * since there is no real stream associated with them
2601 */
Emilian Peev7650c122017-01-19 08:24:33 -08002602 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002603 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2604 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 }
2608
Thierry Strudel2896d122017-02-23 19:18:03 -08002609 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2610 onlyRaw = false;
2611 }
2612
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002613 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002615 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002616 cam_analysis_info_t analysisInfo;
2617 int32_t ret = NO_ERROR;
2618 ret = mCommon.getAnalysisInfo(
2619 FALSE,
2620 analysisFeatureMask,
2621 &analysisInfo);
2622 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002623 cam_color_filter_arrangement_t analysis_color_arrangement =
2624 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2625 CAM_FILTER_ARRANGEMENT_Y :
2626 gCamCapability[mCameraId]->color_arrangement);
2627 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2628 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002629 cam_dimension_t analysisDim;
2630 analysisDim = mCommon.getMatchingDimension(previewSize,
2631 analysisInfo.analysis_recommended_res);
2632
2633 mAnalysisChannel = new QCamera3SupportChannel(
2634 mCameraHandle->camera_handle,
2635 mChannelHandle,
2636 mCameraHandle->ops,
2637 &analysisInfo.analysis_padding_info,
2638 analysisFeatureMask,
2639 CAM_STREAM_TYPE_ANALYSIS,
2640 &analysisDim,
2641 (analysisInfo.analysis_format
2642 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2643 : CAM_FORMAT_YUV_420_NV21),
2644 analysisInfo.hw_analysis_supported,
2645 gCamCapability[mCameraId]->color_arrangement,
2646 this,
2647 0); // force buffer count to 0
2648 } else {
2649 LOGW("getAnalysisInfo failed, ret = %d", ret);
2650 }
2651 if (!mAnalysisChannel) {
2652 LOGW("Analysis channel cannot be created");
2653 }
2654 }
2655
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 //RAW DUMP channel
2657 if (mEnableRawDump && isRawStreamRequested == false){
2658 cam_dimension_t rawDumpSize;
2659 rawDumpSize = getMaxRawSize(mCameraId);
2660 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2661 setPAAFSupport(rawDumpFeatureMask,
2662 CAM_STREAM_TYPE_RAW,
2663 gCamCapability[mCameraId]->color_arrangement);
2664 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2665 mChannelHandle,
2666 mCameraHandle->ops,
2667 rawDumpSize,
2668 &padding_info,
2669 this, rawDumpFeatureMask);
2670 if (!mRawDumpChannel) {
2671 LOGE("Raw Dump channel cannot be created");
2672 pthread_mutex_unlock(&mMutex);
2673 return -ENOMEM;
2674 }
2675 }
2676
Chien-Yu Chenee335912017-02-09 17:53:20 -08002677 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002678 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002679 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002680 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2681 "HDR+ RAW source channel is not created.",
2682 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002683 } else {
2684 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2685 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2686 setPAAFSupport(hdrPlusRawFeatureMask,
2687 CAM_STREAM_TYPE_RAW,
2688 gCamCapability[mCameraId]->color_arrangement);
2689 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2690 mChannelHandle,
2691 mCameraHandle->ops,
2692 rawSize,
2693 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002694 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002695 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002696 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002697 if (!mHdrPlusRawSrcChannel) {
2698 LOGE("HDR+ Raw Source channel cannot be created");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 }
2703 }
2704
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 if (mAnalysisChannel) {
2706 cam_analysis_info_t analysisInfo;
2707 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2708 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2709 CAM_STREAM_TYPE_ANALYSIS;
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2711 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002712 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2714 &analysisInfo);
2715 if (rc != NO_ERROR) {
2716 LOGE("getAnalysisInfo failed, ret = %d", rc);
2717 pthread_mutex_unlock(&mMutex);
2718 return rc;
2719 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002720 cam_color_filter_arrangement_t analysis_color_arrangement =
2721 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2722 CAM_FILTER_ARRANGEMENT_Y :
2723 gCamCapability[mCameraId]->color_arrangement);
2724 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2725 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2726 analysis_color_arrangement);
2727
Thierry Strudel3d639192016-09-09 11:52:26 -07002728 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002729 mCommon.getMatchingDimension(previewSize,
2730 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 mStreamConfigInfo.num_streams++;
2732 }
2733
Thierry Strudel2896d122017-02-23 19:18:03 -08002734 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 cam_analysis_info_t supportInfo;
2736 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2737 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2738 setPAAFSupport(callbackFeatureMask,
2739 CAM_STREAM_TYPE_CALLBACK,
2740 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002741 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002742 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002743 if (ret != NO_ERROR) {
2744 /* Ignore the error for Mono camera
2745 * because the PAAF bit mask is only set
2746 * for CAM_STREAM_TYPE_ANALYSIS stream type
2747 */
2748 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2749 LOGW("getAnalysisInfo failed, ret = %d", ret);
2750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 }
2752 mSupportChannel = new QCamera3SupportChannel(
2753 mCameraHandle->camera_handle,
2754 mChannelHandle,
2755 mCameraHandle->ops,
2756 &gCamCapability[mCameraId]->padding_info,
2757 callbackFeatureMask,
2758 CAM_STREAM_TYPE_CALLBACK,
2759 &QCamera3SupportChannel::kDim,
2760 CAM_FORMAT_YUV_420_NV21,
2761 supportInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002763 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 if (!mSupportChannel) {
2765 LOGE("dummy channel cannot be created");
2766 pthread_mutex_unlock(&mMutex);
2767 return -ENOMEM;
2768 }
2769 }
2770
2771 if (mSupportChannel) {
2772 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2773 QCamera3SupportChannel::kDim;
2774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2775 CAM_STREAM_TYPE_CALLBACK;
2776 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2777 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2778 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2779 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mStreamConfigInfo.num_streams++;
2782 }
2783
2784 if (mRawDumpChannel) {
2785 cam_dimension_t rawSize;
2786 rawSize = getMaxRawSize(mCameraId);
2787 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2788 rawSize;
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2790 CAM_STREAM_TYPE_RAW;
2791 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2792 CAM_QCOM_FEATURE_NONE;
2793 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2794 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2795 gCamCapability[mCameraId]->color_arrangement);
2796 mStreamConfigInfo.num_streams++;
2797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002798
2799 if (mHdrPlusRawSrcChannel) {
2800 cam_dimension_t rawSize;
2801 rawSize = getMaxRawSize(mCameraId);
2802 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2803 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2804 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2805 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2806 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2807 gCamCapability[mCameraId]->color_arrangement);
2808 mStreamConfigInfo.num_streams++;
2809 }
2810
Thierry Strudel3d639192016-09-09 11:52:26 -07002811    /* In HFR mode, if no video stream is added, create a dummy channel so that
2812     * the ISP can run in batch mode even for the preview-only case. This channel is
2813     * never 'start'ed (no stream-on), it is only 'initialized' */
2814 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2815 !m_bIsVideo) {
2816 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2817 setPAAFSupport(dummyFeatureMask,
2818 CAM_STREAM_TYPE_VIDEO,
2819 gCamCapability[mCameraId]->color_arrangement);
2820 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2821 mChannelHandle,
2822 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002823 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 this,
2825 &mDummyBatchStream,
2826 CAM_STREAM_TYPE_VIDEO,
2827 dummyFeatureMask,
2828 mMetadataChannel);
2829 if (NULL == mDummyBatchChannel) {
2830            LOGE("creation of mDummyBatchChannel failed. "
2831                    "Preview will use non-HFR sensor mode");
2832 }
2833 }
2834 if (mDummyBatchChannel) {
2835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2836 mDummyBatchStream.width;
2837 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2838 mDummyBatchStream.height;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2840 CAM_STREAM_TYPE_VIDEO;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2842 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2843 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2844 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2845 gCamCapability[mCameraId]->color_arrangement);
2846 mStreamConfigInfo.num_streams++;
2847 }
2848
2849 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2850 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002851 m_bIs4KVideo ? 0 :
2852 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002853
2854 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2855 for (pendingRequestIterator i = mPendingRequestsList.begin();
2856 i != mPendingRequestsList.end();) {
2857 i = erasePendingRequest(i);
2858 }
2859 mPendingFrameDropList.clear();
2860 // Initialize/Reset the pending buffers list
2861 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2862 req.mPendingBufferList.clear();
2863 }
2864 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 mCurJpegMeta.clear();
2867 //Get min frame duration for this streams configuration
2868 deriveMinFrameDuration();
2869
Chien-Yu Chenee335912017-02-09 17:53:20 -08002870 mFirstPreviewIntentSeen = false;
2871
2872    // Disable HDR+ if it's enabled.
2873 disableHdrPlusModeLocked();
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 // Update state
2876 mState = CONFIGURED;
2877
2878 pthread_mutex_unlock(&mMutex);
2879
2880 return rc;
2881}
2882
2883/*===========================================================================
2884 * FUNCTION : validateCaptureRequest
2885 *
2886 * DESCRIPTION: validate a capture request from camera service
2887 *
2888 * PARAMETERS :
2889 * @request : request from framework to process
2890 *
2891 * RETURN :
2892 *
2893 *==========================================================================*/
2894int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002895 camera3_capture_request_t *request,
2896 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002897{
2898 ssize_t idx = 0;
2899 const camera3_stream_buffer_t *b;
2900 CameraMetadata meta;
2901
2902 /* Sanity check the request */
2903 if (request == NULL) {
2904 LOGE("NULL capture request");
2905 return BAD_VALUE;
2906 }
2907
2908 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2909 /*settings cannot be null for the first request*/
2910 return BAD_VALUE;
2911 }
2912
2913 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002914 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2915 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002916        LOGE("Request %d: No output buffers provided!",
2917                frameNumber);
2918 return BAD_VALUE;
2919 }
2920 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2921        LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
2922            request->num_output_buffers, MAX_NUM_STREAMS);
2923 return BAD_VALUE;
2924 }
2925 if (request->input_buffer != NULL) {
2926 b = request->input_buffer;
2927 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2928 LOGE("Request %d: Buffer %ld: Status not OK!",
2929 frameNumber, (long)idx);
2930 return BAD_VALUE;
2931 }
2932 if (b->release_fence != -1) {
2933 LOGE("Request %d: Buffer %ld: Has a release fence!",
2934 frameNumber, (long)idx);
2935 return BAD_VALUE;
2936 }
2937 if (b->buffer == NULL) {
2938 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2939 frameNumber, (long)idx);
2940 return BAD_VALUE;
2941 }
2942 }
2943
2944 // Validate all buffers
2945 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002946 if (b == NULL) {
2947 return BAD_VALUE;
2948 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002949 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002950 QCamera3ProcessingChannel *channel =
2951 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2952 if (channel == NULL) {
2953 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2954 frameNumber, (long)idx);
2955 return BAD_VALUE;
2956 }
2957 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2958 LOGE("Request %d: Buffer %ld: Status not OK!",
2959 frameNumber, (long)idx);
2960 return BAD_VALUE;
2961 }
2962 if (b->release_fence != -1) {
2963 LOGE("Request %d: Buffer %ld: Has a release fence!",
2964 frameNumber, (long)idx);
2965 return BAD_VALUE;
2966 }
2967 if (b->buffer == NULL) {
2968 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2969 frameNumber, (long)idx);
2970 return BAD_VALUE;
2971 }
2972 if (*(b->buffer) == NULL) {
2973 LOGE("Request %d: Buffer %ld: NULL private handle!",
2974 frameNumber, (long)idx);
2975 return BAD_VALUE;
2976 }
2977 idx++;
2978 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002980 return NO_ERROR;
2981}
2982
2983/*===========================================================================
2984 * FUNCTION : deriveMinFrameDuration
2985 *
2986 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2987 * on currently configured streams.
2988 *
2989 * PARAMETERS : NONE
2990 *
2991 * RETURN : NONE
2992 *
2993 *==========================================================================*/
2994void QCamera3HardwareInterface::deriveMinFrameDuration()
2995{
2996 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2997
2998 maxJpegDim = 0;
2999 maxProcessedDim = 0;
3000 maxRawDim = 0;
3001
3002 // Figure out maximum jpeg, processed, and raw dimensions
3003 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3004 it != mStreamInfo.end(); it++) {
3005
3006 // Input stream doesn't have valid stream_type
3007 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3008 continue;
3009
3010 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3011 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3012 if (dimension > maxJpegDim)
3013 maxJpegDim = dimension;
3014 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3015 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3016 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3017 if (dimension > maxRawDim)
3018 maxRawDim = dimension;
3019 } else {
3020 if (dimension > maxProcessedDim)
3021 maxProcessedDim = dimension;
3022 }
3023 }
3024
3025 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3026 MAX_SIZES_CNT);
3027
3028 //Assume all jpeg dimensions are in processed dimensions.
3029 if (maxJpegDim > maxProcessedDim)
3030 maxProcessedDim = maxJpegDim;
3031    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3032 if (maxProcessedDim > maxRawDim) {
3033 maxRawDim = INT32_MAX;
3034
3035 for (size_t i = 0; i < count; i++) {
3036 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3037 gCamCapability[mCameraId]->raw_dim[i].height;
3038 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3039 maxRawDim = dimension;
3040 }
3041 }
3042
3043 //Find minimum durations for processed, jpeg, and raw
3044 for (size_t i = 0; i < count; i++) {
3045 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3046 gCamCapability[mCameraId]->raw_dim[i].height) {
3047 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3048 break;
3049 }
3050 }
3051 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3052 for (size_t i = 0; i < count; i++) {
3053 if (maxProcessedDim ==
3054 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3055 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3056 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3057 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3058 break;
3059 }
3060 }
3061}
3062
3063/*===========================================================================
3064 * FUNCTION : getMinFrameDuration
3065 *
3066 * DESCRIPTION: get the minimum frame duration based on the per-stream minimum
3067 *              durations derived for the current configuration and on the current request.
3068 *
3069 * PARAMETERS : @request: request sent by the framework
3070 *
3071 * RETURN     : min frame duration for a particular request
3072 *
3073 *==========================================================================*/
3074int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3075{
3076 bool hasJpegStream = false;
3077 bool hasRawStream = false;
3078 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3079 const camera3_stream_t *stream = request->output_buffers[i].stream;
3080 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3081 hasJpegStream = true;
3082 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3083 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3084 stream->format == HAL_PIXEL_FORMAT_RAW16)
3085 hasRawStream = true;
3086 }
3087
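    // The request can run no faster than its slowest stream type, so the
    // effective minimum duration is the largest of the per-type minimums
    // (JPEG only counts when a BLOB stream is part of the request).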
3088 if (!hasJpegStream)
3089 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3090 else
3091 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3092}
3093
3094/*===========================================================================
3095 * FUNCTION : handleBuffersDuringFlushLock
3096 *
3097 * DESCRIPTION: Account for buffers returned from back-end during flush
3098 * This function is executed while mMutex is held by the caller.
3099 *
3100 * PARAMETERS :
3101 * @buffer: image buffer for the callback
3102 *
3103 * RETURN :
3104 *==========================================================================*/
3105void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3106{
3107 bool buffer_found = false;
3108 for (List<PendingBuffersInRequest>::iterator req =
3109 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3110 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3111 for (List<PendingBufferInfo>::iterator i =
3112 req->mPendingBufferList.begin();
3113 i != req->mPendingBufferList.end(); i++) {
3114 if (i->buffer == buffer->buffer) {
3115 mPendingBuffersMap.numPendingBufsAtFlush--;
3116 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3117 buffer->buffer, req->frame_number,
3118 mPendingBuffersMap.numPendingBufsAtFlush);
3119 buffer_found = true;
3120 break;
3121 }
3122 }
3123 if (buffer_found) {
3124 break;
3125 }
3126 }
3127 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3128 //signal the flush()
3129 LOGD("All buffers returned to HAL. Continue flush");
3130 pthread_cond_signal(&mBuffersCond);
3131 }
3132}
3133
Thierry Strudel3d639192016-09-09 11:52:26 -07003134/*===========================================================================
3135 * FUNCTION : handleBatchMetadata
3136 *
3137 * DESCRIPTION: Handles metadata buffer callback in batch mode
3138 *
3139 * PARAMETERS : @metadata_buf: metadata buffer
3140 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3141 * the meta buf in this method
3142 *
3143 * RETURN :
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::handleBatchMetadata(
3147 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3148{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003149 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003150
3151 if (NULL == metadata_buf) {
3152 LOGE("metadata_buf is NULL");
3153 return;
3154 }
3155 /* In batch mode, the metdata will contain the frame number and timestamp of
3156 * the last frame in the batch. Eg: a batch containing buffers from request
3157 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3158 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3159 * multiple process_capture_results */
3160 metadata_buffer_t *metadata =
3161 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3162 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3163 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3164 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3165 uint32_t frame_number = 0, urgent_frame_number = 0;
3166 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3167 bool invalid_metadata = false;
3168 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3169 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003170 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003171
3172 int32_t *p_frame_number_valid =
3173 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3174 uint32_t *p_frame_number =
3175 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3176 int64_t *p_capture_time =
3177 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3178 int32_t *p_urgent_frame_number_valid =
3179 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3180 uint32_t *p_urgent_frame_number =
3181 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3182
3183 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3184 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3185 (NULL == p_urgent_frame_number)) {
3186 LOGE("Invalid metadata");
3187 invalid_metadata = true;
3188 } else {
3189 frame_number_valid = *p_frame_number_valid;
3190 last_frame_number = *p_frame_number;
3191 last_frame_capture_time = *p_capture_time;
3192 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3193 last_urgent_frame_number = *p_urgent_frame_number;
3194 }
3195
3196    /* In batch mode, when no video buffers are requested, set_parms are sent
3197 * for every capture_request. The difference between consecutive urgent
3198 * frame numbers and frame numbers should be used to interpolate the
3199 * corresponding frame numbers and time stamps */
3200 pthread_mutex_lock(&mMutex);
3201 if (urgent_frame_number_valid) {
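        // mPendingBatchMap is keyed by the last frame number of a batch and
        // maps it to the first frame number of that batch; the span between
        // the two is used below to interpolate the in-between frames.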
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003202 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3203 if(idx < 0) {
3204 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3205 last_urgent_frame_number);
3206 mState = ERROR;
3207 pthread_mutex_unlock(&mMutex);
3208 return;
3209 }
3210 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003211 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3212 first_urgent_frame_number;
3213
3214 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3215 urgent_frame_number_valid,
3216 first_urgent_frame_number, last_urgent_frame_number);
3217 }
3218
3219 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003220 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3221 if(idx < 0) {
3222 LOGE("Invalid frame number received: %d. Irrecoverable error",
3223 last_frame_number);
3224 mState = ERROR;
3225 pthread_mutex_unlock(&mMutex);
3226 return;
3227 }
3228 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003229 frameNumDiff = last_frame_number + 1 -
3230 first_frame_number;
3231 mPendingBatchMap.removeItem(last_frame_number);
3232
3233 LOGD("frm: valid: %d frm_num: %d - %d",
3234 frame_number_valid,
3235 first_frame_number, last_frame_number);
3236
3237 }
3238 pthread_mutex_unlock(&mMutex);
3239
3240 if (urgent_frame_number_valid || frame_number_valid) {
3241 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3242 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3243 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3244 urgentFrameNumDiff, last_urgent_frame_number);
3245 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3246 LOGE("frameNumDiff: %d frameNum: %d",
3247 frameNumDiff, last_frame_number);
3248 }
3249
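    // Emit one synthesized metadata callback per frame in the batch, inferring
    // each frame number and timestamp from the batch's last-frame values.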
3250 for (size_t i = 0; i < loopCount; i++) {
3251 /* handleMetadataWithLock is called even for invalid_metadata for
3252 * pipeline depth calculation */
3253 if (!invalid_metadata) {
3254 /* Infer frame number. Batch metadata contains frame number of the
3255 * last frame */
3256 if (urgent_frame_number_valid) {
3257 if (i < urgentFrameNumDiff) {
3258 urgent_frame_number =
3259 first_urgent_frame_number + i;
3260 LOGD("inferred urgent frame_number: %d",
3261 urgent_frame_number);
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3263 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3264 } else {
3265 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3268 }
3269 }
3270
3271 /* Infer frame number. Batch metadata contains frame number of the
3272 * last frame */
3273 if (frame_number_valid) {
3274 if (i < frameNumDiff) {
3275 frame_number = first_frame_number + i;
3276 LOGD("inferred frame_number: %d", frame_number);
3277 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3278 CAM_INTF_META_FRAME_NUMBER, frame_number);
3279 } else {
3280 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3283 }
3284 }
3285
3286 if (last_frame_capture_time) {
3287 //Infer timestamp
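                // Illustration (assumed values): with mHFRVideoFps = 120 and a
                // batch of 4, frames are spaced NSEC_PER_SEC/120 ~= 8.33 ms apart,
                // so the first timestamp is the batch timestamp minus 3 * 8.33 ms
                // and each inferred frame i adds i * 8.33 ms to that.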
3288 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003289 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003291 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003292 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3293 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3294 LOGD("batch capture_time: %lld, capture_time: %lld",
3295 last_frame_capture_time, capture_time);
3296 }
3297 }
3298 pthread_mutex_lock(&mMutex);
3299 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003300 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003301 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3302 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003303 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 pthread_mutex_unlock(&mMutex);
3305 }
3306
3307 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003308 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003309 mMetadataChannel->bufDone(metadata_buf);
3310 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003311 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003312 }
3313}
3314
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003315void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3316 camera3_error_msg_code_t errorCode)
3317{
3318 camera3_notify_msg_t notify_msg;
3319 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3320 notify_msg.type = CAMERA3_MSG_ERROR;
3321 notify_msg.message.error.error_code = errorCode;
3322 notify_msg.message.error.error_stream = NULL;
3323 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003324 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003325
3326 return;
3327}
Thierry Strudel3d639192016-09-09 11:52:26 -07003328/*===========================================================================
3329 * FUNCTION : handleMetadataWithLock
3330 *
3331 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3332 *
3333 * PARAMETERS : @metadata_buf: metadata buffer
3334 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3335 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003336 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3337 * last urgent metadata in a batch. Always true for non-batch mode
3338 * @lastMetadataInBatch: Boolean to indicate whether this is the
3339 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3341 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003342 *
3343 * RETURN :
3344 *
3345 *==========================================================================*/
3346void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003347 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003348 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3349 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3353 //during flush do not send metadata from this thread
3354 LOGD("not sending metadata during flush or when mState is error");
3355 if (free_and_bufdone_meta_buf) {
3356 mMetadataChannel->bufDone(metadata_buf);
3357 free(metadata_buf);
3358 }
3359 return;
3360 }
3361
3362 //not in flush
3363 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3364 int32_t frame_number_valid, urgent_frame_number_valid;
3365 uint32_t frame_number, urgent_frame_number;
3366 int64_t capture_time;
3367 nsecs_t currentSysTime;
3368
3369 int32_t *p_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3372 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3373 int32_t *p_urgent_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_urgent_frame_number =
3376 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3377 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3378 metadata) {
3379 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3380 *p_frame_number_valid, *p_frame_number);
3381 }
3382
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003383 camera_metadata_t *resultMetadata = nullptr;
3384
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3386 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3387 LOGE("Invalid metadata");
3388 if (free_and_bufdone_meta_buf) {
3389 mMetadataChannel->bufDone(metadata_buf);
3390 free(metadata_buf);
3391 }
3392 goto done_metadata;
3393 }
3394 frame_number_valid = *p_frame_number_valid;
3395 frame_number = *p_frame_number;
3396 capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 urgent_frame_number = *p_urgent_frame_number;
3399 currentSysTime = systemTime(CLOCK_MONOTONIC);
3400
3401 // Detect if buffers from any requests are overdue
3402 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003403 int64_t timeout;
3404 {
3405 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3406 // If there is a pending HDR+ request, the following requests may be blocked until the
3407 // HDR+ request is done. So allow a longer timeout.
3408 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3409 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3410 }
3411
3412 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003414 assert(missed.stream->priv);
3415 if (missed.stream->priv) {
3416 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3417 assert(ch->mStreams[0]);
3418 if (ch->mStreams[0]) {
3419 LOGE("Cancel missing frame = %d, buffer = %p,"
3420 "stream type = %d, stream format = %d",
3421 req.frame_number, missed.buffer,
3422 ch->mStreams[0]->getMyType(), missed.stream->format);
3423 ch->timeoutFrame(req.frame_number);
3424 }
3425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 }
3427 }
3428 }
3429 //Partial result on process_capture_result for timestamp
3430 if (urgent_frame_number_valid) {
3431 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3432 urgent_frame_number, capture_time);
3433
3434        //Received an urgent frame number, handle it
3435 //using partial results
3436 for (pendingRequestIterator i =
3437 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3438 LOGD("Iterator Frame = %d urgent frame = %d",
3439 i->frame_number, urgent_frame_number);
3440
3441 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3442 (i->partial_result_cnt == 0)) {
3443 LOGE("Error: HAL missed urgent metadata for frame number %d",
3444 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003445 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003446 }
3447
3448 if (i->frame_number == urgent_frame_number &&
3449 i->bUrgentReceived == 0) {
3450
3451 camera3_capture_result_t result;
3452 memset(&result, 0, sizeof(camera3_capture_result_t));
3453
3454 i->partial_result_cnt++;
3455 i->bUrgentReceived = 1;
3456 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003457 result.result = translateCbUrgentMetadataToResultMetadata(
3458 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 // Populate metadata result
3460 result.frame_number = urgent_frame_number;
3461 result.num_output_buffers = 0;
3462 result.output_buffers = NULL;
3463 result.partial_result = i->partial_result_cnt;
3464
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003465 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003466 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003467 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003468 result.partial_result == PARTIAL_RESULT_COUNT);
3469 }
3470
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003471 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 LOGD("urgent frame_number = %u, capture_time = %lld",
3473 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003474 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3475 // Instant AEC settled for this frame.
3476 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3477 mInstantAECSettledFrameNumber = urgent_frame_number;
3478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 free_camera_metadata((camera_metadata_t *)result.result);
3480 break;
3481 }
3482 }
3483 }
3484
3485 if (!frame_number_valid) {
3486 LOGD("Not a valid normal frame number, used as SOF only");
3487 if (free_and_bufdone_meta_buf) {
3488 mMetadataChannel->bufDone(metadata_buf);
3489 free(metadata_buf);
3490 }
3491 goto done_metadata;
3492 }
3493 LOGH("valid frame_number = %u, capture_time = %lld",
3494 frame_number, capture_time);
3495
Emilian Peev7650c122017-01-19 08:24:33 -08003496 if (metadata->is_depth_data_valid) {
3497 handleDepthDataLocked(metadata->depth_data, frame_number);
3498 }
3499
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003500    // Check whether any stream buffer corresponding to this frame was dropped.
3501    // If dropped, send an ERROR_BUFFER notification for the corresponding stream.
3502    // Also, if instant AEC is enabled, drop frames until AEC has settled.
3503 for (auto & pendingRequest : mPendingRequestsList) {
3504 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3505 mInstantAECSettledFrameNumber)) {
3506 camera3_notify_msg_t notify_msg = {};
3507 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003508 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003509 QCamera3ProcessingChannel *channel =
3510 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003511 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003512 if (p_cam_frame_drop) {
3513 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003514 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003515 // Got the stream ID for drop frame.
3516 dropFrame = true;
3517 break;
3518 }
3519 }
3520 } else {
3521 // This is instant AEC case.
3522                    // For instant AEC, drop the stream until AEC is settled.
3523 dropFrame = true;
3524 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003526 if (dropFrame) {
3527 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3528 if (p_cam_frame_drop) {
3529 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003531 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003532 } else {
3533 // For instant AEC, inform frame drop and frame number
3534 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3535 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003536 pendingRequest.frame_number, streamID,
3537 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003538 }
3539 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003540 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003541 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003543 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003544 if (p_cam_frame_drop) {
3545 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003546 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 } else {
3549 // For instant AEC, inform frame drop and frame number
3550 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3551 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003552 pendingRequest.frame_number, streamID,
3553 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 }
3555 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 PendingFrameDrop.stream_ID = streamID;
3558 // Add the Frame drop info to mPendingFrameDropList
3559 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 }
3562 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 for (auto & pendingRequest : mPendingRequestsList) {
3566 // Find the pending request with the frame number.
3567 if (pendingRequest.frame_number == frame_number) {
3568 // Update the sensor timestamp.
3569 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003570
Thierry Strudel3d639192016-09-09 11:52:26 -07003571
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003572            /* Set the timestamp in the display metadata so that clients aware of
3573               private_handle, such as VT, can use these unmodified timestamps.
3574               The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003575 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003576
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 // Find channel requiring metadata, meaning internal offline postprocess
3578 // is needed.
3579 //TODO: for now, we don't support two streams requiring metadata at the same time.
3580            // (because we are not making copies, and the metadata buffer is not reference counted.)
3581 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3583 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 if (iter->need_metadata) {
3585 internalPproc = true;
3586 QCamera3ProcessingChannel *channel =
3587 (QCamera3ProcessingChannel *)iter->stream->priv;
3588 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003589 if(p_is_metabuf_queued != NULL) {
3590 *p_is_metabuf_queued = true;
3591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 break;
3593 }
3594 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003595 for (auto itr = pendingRequest.internalRequestList.begin();
3596 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003597 if (itr->need_metadata) {
3598 internalPproc = true;
3599 QCamera3ProcessingChannel *channel =
3600 (QCamera3ProcessingChannel *)itr->stream->priv;
3601 channel->queueReprocMetadata(metadata_buf);
3602 break;
3603 }
3604 }
3605
Thierry Strudel54dc9782017-02-15 12:12:10 -08003606 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 resultMetadata = translateFromHalMetadata(metadata,
3608 pendingRequest.timestamp, pendingRequest.request_id,
3609 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3610 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003611 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003612 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003613 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003614 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003615 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003616 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003618 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003620 if (pendingRequest.blob_request) {
3621 //Dump tuning metadata if enabled and available
3622 char prop[PROPERTY_VALUE_MAX];
3623 memset(prop, 0, sizeof(prop));
3624 property_get("persist.camera.dumpmetadata", prop, "0");
3625 int32_t enabled = atoi(prop);
3626 if (enabled && metadata->is_tuning_params_valid) {
3627 dumpMetadataToFile(metadata->tuning_params,
3628 mMetaFrameCount,
3629 enabled,
3630 "Snapshot",
3631 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 }
3633 }
3634
3635 if (!internalPproc) {
3636 LOGD("couldn't find need_metadata for this metadata");
3637 // Return metadata buffer
3638 if (free_and_bufdone_meta_buf) {
3639 mMetadataChannel->bufDone(metadata_buf);
3640 free(metadata_buf);
3641 }
3642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 }
3646 }
3647
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 // Try to send out shutter callbacks and capture results.
3649 handlePendingResultsWithLock(frame_number, resultMetadata);
3650 return;
3651
Thierry Strudel3d639192016-09-09 11:52:26 -07003652done_metadata:
3653 for (pendingRequestIterator i = mPendingRequestsList.begin();
3654 i != mPendingRequestsList.end() ;i++) {
3655 i->pipeline_depth++;
3656 }
3657 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3658 unblockRequestIfNecessary();
3659}
3660
3661/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003662 * FUNCTION   : handleDepthDataLocked
3663 *
3664 * DESCRIPTION: Handles incoming depth data
3665 *
3666 * PARAMETERS : @depthData : Depth data
3667 * @frameNumber: Frame number of the incoming depth data
3668 *
3669 * RETURN :
3670 *
3671 *==========================================================================*/
3672void QCamera3HardwareInterface::handleDepthDataLocked(
3673 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3674 uint32_t currentFrameNumber;
3675 buffer_handle_t *depthBuffer;
3676
3677 if (nullptr == mDepthChannel) {
3678 LOGE("Depth channel not present!");
3679 return;
3680 }
3681
3682 camera3_stream_buffer_t resultBuffer =
3683 {.acquire_fence = -1,
3684 .release_fence = -1,
3685 .status = CAMERA3_BUFFER_STATUS_OK,
3686 .buffer = nullptr,
3687 .stream = mDepthChannel->getStream()};
3688 camera3_capture_result_t result =
3689 {.result = nullptr,
3690 .num_output_buffers = 1,
3691 .output_buffers = &resultBuffer,
3692 .partial_result = 0,
3693 .frame_number = 0};
3694
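    // Drain queued depth buffers in frame-number order: buffers older than the
    // incoming frame are returned with an error status, the matching frame is
    // populated with the new depth data, and newer buffers stay queued.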
3695 do {
3696 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3697 if (nullptr == depthBuffer) {
3698 break;
3699 }
3700
3701 result.frame_number = currentFrameNumber;
3702 resultBuffer.buffer = depthBuffer;
3703 if (currentFrameNumber == frameNumber) {
3704 int32_t rc = mDepthChannel->populateDepthData(depthData,
3705 frameNumber);
3706 if (NO_ERROR != rc) {
3707 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3708 } else {
3709 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3710 }
3711 } else if (currentFrameNumber > frameNumber) {
3712 break;
3713 } else {
3714 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3715 {{currentFrameNumber, mDepthChannel->getStream(),
3716 CAMERA3_MSG_ERROR_BUFFER}}};
3717 orchestrateNotify(&notify_msg);
3718
3719            LOGE("Depth buffer for frame number: %d is missing, "
3720                    "returning it with error status!", currentFrameNumber);
3721 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3722 }
3723 mDepthChannel->unmapBuffer(currentFrameNumber);
3724
3725 orchestrateResult(&result);
3726 } while (currentFrameNumber < frameNumber);
3727}
3728
3729/*===========================================================================
3730 * FUNCTION : notifyErrorFoPendingDepthData
3731 *
3732 * DESCRIPTION: Returns error for any pending depth buffers
3733 *
3734 * PARAMETERS : depthCh - depth channel that needs to get flushed
3735 *
3736 * RETURN :
3737 *
3738 *==========================================================================*/
3739void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3740 QCamera3DepthChannel *depthCh) {
3741 uint32_t currentFrameNumber;
3742 buffer_handle_t *depthBuffer;
3743
3744 if (nullptr == depthCh) {
3745 return;
3746 }
3747
3748 camera3_notify_msg_t notify_msg =
3749 {.type = CAMERA3_MSG_ERROR,
3750 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3751 camera3_stream_buffer_t resultBuffer =
3752 {.acquire_fence = -1,
3753 .release_fence = -1,
3754 .buffer = nullptr,
3755 .stream = depthCh->getStream(),
3756 .status = CAMERA3_BUFFER_STATUS_ERROR};
3757 camera3_capture_result_t result =
3758 {.result = nullptr,
3759 .frame_number = 0,
3760 .num_output_buffers = 1,
3761 .partial_result = 0,
3762 .output_buffers = &resultBuffer};
3763
3764 while (nullptr !=
3765 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3766 depthCh->unmapBuffer(currentFrameNumber);
3767
3768 notify_msg.message.error.frame_number = currentFrameNumber;
3769 orchestrateNotify(&notify_msg);
3770
3771 resultBuffer.buffer = depthBuffer;
3772 result.frame_number = currentFrameNumber;
3773 orchestrateResult(&result);
3774 };
3775}
3776
3777/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003778 * FUNCTION : hdrPlusPerfLock
3779 *
3780 * DESCRIPTION: perf lock for HDR+ using custom intent
3781 *
3782 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3783 *
3784 * RETURN : None
3785 *
3786 *==========================================================================*/
3787void QCamera3HardwareInterface::hdrPlusPerfLock(
3788 mm_camera_super_buf_t *metadata_buf)
3789{
3790 if (NULL == metadata_buf) {
3791 LOGE("metadata_buf is NULL");
3792 return;
3793 }
3794 metadata_buffer_t *metadata =
3795 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3796 int32_t *p_frame_number_valid =
3797 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3798 uint32_t *p_frame_number =
3799 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3800
3801 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3802 LOGE("%s: Invalid metadata", __func__);
3803 return;
3804 }
3805
3806 //acquire perf lock for 5 sec after the last HDR frame is captured
3807 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3808 if ((p_frame_number != NULL) &&
3809 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003810 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003811 }
3812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813}
3814
3815/*===========================================================================
3816 * FUNCTION : handleInputBufferWithLock
3817 *
3818 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3819 *
3820 * PARAMETERS : @frame_number: frame number of the input buffer
3821 *
3822 * RETURN :
3823 *
3824 *==========================================================================*/
3825void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3826{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003827 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003828 pendingRequestIterator i = mPendingRequestsList.begin();
3829 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3830 i++;
3831 }
3832 if (i != mPendingRequestsList.end() && i->input_buffer) {
3833 //found the right request
3834 if (!i->shutter_notified) {
3835 CameraMetadata settings;
3836 camera3_notify_msg_t notify_msg;
3837 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3838 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3839 if(i->settings) {
3840 settings = i->settings;
3841 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3842 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3843 } else {
3844 LOGE("No timestamp in input settings! Using current one.");
3845 }
3846 } else {
3847 LOGE("Input settings missing!");
3848 }
3849
3850 notify_msg.type = CAMERA3_MSG_SHUTTER;
3851 notify_msg.message.shutter.frame_number = frame_number;
3852 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003853 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003854 i->shutter_notified = true;
3855 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3856 i->frame_number, notify_msg.message.shutter.timestamp);
3857 }
3858
3859 if (i->input_buffer->release_fence != -1) {
3860 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3861 close(i->input_buffer->release_fence);
3862 if (rc != OK) {
3863 LOGE("input buffer sync wait failed %d", rc);
3864 }
3865 }
3866
3867 camera3_capture_result result;
3868 memset(&result, 0, sizeof(camera3_capture_result));
3869 result.frame_number = frame_number;
3870 result.result = i->settings;
3871 result.input_buffer = i->input_buffer;
3872 result.partial_result = PARTIAL_RESULT_COUNT;
3873
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003874 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875 LOGD("Input request metadata and input buffer frame_number = %u",
3876 i->frame_number);
3877 i = erasePendingRequest(i);
3878 } else {
3879 LOGE("Could not find input request for frame number %d", frame_number);
3880 }
3881}
3882
3883/*===========================================================================
3884 * FUNCTION : handleBufferWithLock
3885 *
3886 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3887 *
3888 * PARAMETERS : @buffer: image buffer for the callback
3889 * @frame_number: frame number of the image buffer
3890 *
3891 * RETURN :
3892 *
3893 *==========================================================================*/
3894void QCamera3HardwareInterface::handleBufferWithLock(
3895 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3896{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003897 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003898
3899 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3900 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3901 }
3902
Thierry Strudel3d639192016-09-09 11:52:26 -07003903 /* Nothing to be done during error state */
3904 if ((ERROR == mState) || (DEINIT == mState)) {
3905 return;
3906 }
3907 if (mFlushPerf) {
3908 handleBuffersDuringFlushLock(buffer);
3909 return;
3910 }
3911 //not in flush
3912 // If the frame number doesn't exist in the pending request list,
3913 // directly send the buffer to the frameworks, and update pending buffers map
3914 // Otherwise, book-keep the buffer.
3915 pendingRequestIterator i = mPendingRequestsList.begin();
3916 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3917 i++;
3918 }
3919 if (i == mPendingRequestsList.end()) {
3920 // Verify all pending requests frame_numbers are greater
3921 for (pendingRequestIterator j = mPendingRequestsList.begin();
3922 j != mPendingRequestsList.end(); j++) {
3923 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3924 LOGW("Error: pending live frame number %d is smaller than %d",
3925 j->frame_number, frame_number);
3926 }
3927 }
3928 camera3_capture_result_t result;
3929 memset(&result, 0, sizeof(camera3_capture_result_t));
3930 result.result = NULL;
3931 result.frame_number = frame_number;
3932 result.num_output_buffers = 1;
3933 result.partial_result = 0;
3934 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3935 m != mPendingFrameDropList.end(); m++) {
3936 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3937 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3938 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3939 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3940 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3941 frame_number, streamID);
3942 m = mPendingFrameDropList.erase(m);
3943 break;
3944 }
3945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003946 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003947 result.output_buffers = buffer;
3948 LOGH("result frame_number = %d, buffer = %p",
3949 frame_number, buffer->buffer);
3950
3951 mPendingBuffersMap.removeBuf(buffer->buffer);
3952
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003953 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003954 } else {
3955 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003956 if (i->input_buffer->release_fence != -1) {
3957 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3958 close(i->input_buffer->release_fence);
3959 if (rc != OK) {
3960 LOGE("input buffer sync wait failed %d", rc);
3961 }
3962 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003963 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003964
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003965 // Put buffer into the pending request
3966 for (auto &requestedBuffer : i->buffers) {
3967 if (requestedBuffer.stream == buffer->stream) {
3968 if (requestedBuffer.buffer != nullptr) {
3969 LOGE("Error: buffer is already set");
3970 } else {
3971 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3972 sizeof(camera3_stream_buffer_t));
3973 *(requestedBuffer.buffer) = *buffer;
3974 LOGH("cache buffer %p at result frame_number %u",
3975 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003976 }
3977 }
3978 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003979
3980 if (i->input_buffer) {
3981 // For a reprocessing request, try to send out shutter callback and result metadata.
3982 handlePendingResultsWithLock(frame_number, nullptr);
3983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003985
3986 if (mPreviewStarted == false) {
3987 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3988 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3989 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3990 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3991 mPreviewStarted = true;
3992
3993 // Set power hint for preview
3994 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3995 }
3996 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003997}
3998
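/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Update the pending request matching frameNumber with the given
 *              result metadata, then send out shutter callbacks and capture
 *              results for all pending requests that are ready, in frame
 *              number order. When this result belongs to a live request,
 *              earlier live requests that still have no result metadata are
 *              completed with CAMERA3_MSG_ERROR_RESULT. Note that mMutex is
 *              held when this function is called.
 *
 * PARAMETERS :
 *   @frameNumber    : frame number of the request this result belongs to
 *   @resultMetadata : result metadata of the request (may be nullptr for a
 *                     reprocessing request, whose result is taken from its
 *                     settings)
 *
 * RETURN     :
 *
 *==========================================================================*/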
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003999void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4000 const camera_metadata_t *resultMetadata)
4001{
4002 // Find the pending request for this result metadata.
4003 auto requestIter = mPendingRequestsList.begin();
4004 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4005 requestIter++;
4006 }
4007
4008 if (requestIter == mPendingRequestsList.end()) {
4009 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4010 return;
4011 }
4012
4013 // Update the result metadata
4014 requestIter->resultMetadata = resultMetadata;
4015
4016 // Check what type of request this is.
4017 bool liveRequest = false;
4018 if (requestIter->hdrplus) {
4019 // HDR+ request doesn't have partial results.
4020 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4021 } else if (requestIter->input_buffer != nullptr) {
4022 // Reprocessing request result is the same as settings.
4023 requestIter->resultMetadata = requestIter->settings;
4024 // Reprocessing request doesn't have partial results.
4025 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4026 } else {
4027 liveRequest = true;
4028 requestIter->partial_result_cnt++;
4029 mPendingLiveRequest--;
4030
4031 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004032 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4033 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004034 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4035 }
4036 }
4037
4038 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4039 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4040 bool readyToSend = true;
4041
4042 // Iterate through the pending requests to send out shutter callbacks and results that are
4043 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4044 // live requests that don't have result metadata yet.
4045 auto iter = mPendingRequestsList.begin();
4046 while (iter != mPendingRequestsList.end()) {
4047 // Check if current pending request is ready. If it's not ready, the following pending
4048 // requests are also not ready.
4049 if (readyToSend && iter->resultMetadata == nullptr) {
4050 readyToSend = false;
4051 }
4052
4053 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4054
4055 std::vector<camera3_stream_buffer_t> outputBuffers;
4056
4057 camera3_capture_result_t result = {};
4058 result.frame_number = iter->frame_number;
4059 result.result = iter->resultMetadata;
4060 result.partial_result = iter->partial_result_cnt;
4061
4062 // If this pending buffer has result metadata, we may be able to send out shutter callback
4063 // and result metadata.
4064 if (iter->resultMetadata != nullptr) {
4065 if (!readyToSend) {
4066 // If any of the previous pending request is not ready, this pending request is
4067 // also not ready to send in order to keep shutter callbacks and result metadata
4068 // in order.
4069 iter++;
4070 continue;
4071 }
4072
4073 // Invoke shutter callback if not yet.
4074 if (!iter->shutter_notified) {
4075 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4076
4077 // Find the timestamp in HDR+ result metadata
4078 camera_metadata_ro_entry_t entry;
4079 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4080 ANDROID_SENSOR_TIMESTAMP, &entry);
4081 if (res != OK) {
4082 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4083 __FUNCTION__, iter->frame_number, strerror(-res), res);
4084 } else {
4085 timestamp = entry.data.i64[0];
4086 }
4087
4088 camera3_notify_msg_t notify_msg = {};
4089 notify_msg.type = CAMERA3_MSG_SHUTTER;
4090 notify_msg.message.shutter.frame_number = iter->frame_number;
4091 notify_msg.message.shutter.timestamp = timestamp;
4092 orchestrateNotify(&notify_msg);
4093 iter->shutter_notified = true;
4094 }
4095
4096 result.input_buffer = iter->input_buffer;
4097
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004098 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4099 // If the result metadata belongs to a live request, notify errors for previous pending
4100 // live requests.
4101 mPendingLiveRequest--;
4102
4103 CameraMetadata dummyMetadata;
4104 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4105 result.result = dummyMetadata.release();
4106
4107 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004108
 4109             // partial_result should be PARTIAL_RESULT_COUNT in case of
4110 // ERROR_RESULT.
4111 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4112 result.partial_result = PARTIAL_RESULT_COUNT;
4113
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004114 } else {
4115 iter++;
4116 continue;
4117 }
4118
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004119 // Prepare output buffer array
4120 for (auto bufferInfoIter = iter->buffers.begin();
4121 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4122 if (bufferInfoIter->buffer != nullptr) {
4123
4124 QCamera3Channel *channel =
4125 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4126 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4127
4128 // Check if this buffer is a dropped frame.
4129 auto frameDropIter = mPendingFrameDropList.begin();
4130 while (frameDropIter != mPendingFrameDropList.end()) {
4131 if((frameDropIter->stream_ID == streamID) &&
4132 (frameDropIter->frame_number == frameNumber)) {
4133 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4134 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4135 streamID);
4136 mPendingFrameDropList.erase(frameDropIter);
4137 break;
4138 } else {
4139 frameDropIter++;
4140 }
4141 }
4142
4143 // Check buffer error status
4144 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4145 bufferInfoIter->buffer->buffer);
4146 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4147
4148 outputBuffers.push_back(*(bufferInfoIter->buffer));
4149 free(bufferInfoIter->buffer);
4150 bufferInfoIter->buffer = NULL;
4151 }
4152 }
4153
4154 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4155 result.num_output_buffers = outputBuffers.size();
4156
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 orchestrateResult(&result);
4158
4159 // For reprocessing, result metadata is the same as settings so do not free it here to
4160 // avoid double free.
4161 if (result.result != iter->settings) {
4162 free_camera_metadata((camera_metadata_t *)result.result);
4163 }
4164 iter->resultMetadata = nullptr;
4165 iter = erasePendingRequest(iter);
4166 }
4167
4168 if (liveRequest) {
4169 for (auto &iter : mPendingRequestsList) {
4170 // Increment pipeline depth for the following pending requests.
4171 if (iter.frame_number > frameNumber) {
4172 iter.pipeline_depth++;
4173 }
4174 }
4175 }
4176
4177 unblockRequestIfNecessary();
4178}
4179
Thierry Strudel3d639192016-09-09 11:52:26 -07004180/*===========================================================================
4181 * FUNCTION : unblockRequestIfNecessary
4182 *
4183 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4184 * that mMutex is held when this function is called.
4185 *
4186 * PARAMETERS :
4187 *
4188 * RETURN :
4189 *
4190 *==========================================================================*/
4191void QCamera3HardwareInterface::unblockRequestIfNecessary()
4192{
4193 // Unblock process_capture_request
4194 pthread_cond_signal(&mRequestCond);
4195}
4196
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004197/*===========================================================================
4198 * FUNCTION : isHdrSnapshotRequest
4199 *
4200 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4201 *
4202 * PARAMETERS : camera3 request structure
4203 *
4204 * RETURN : boolean decision variable
4205 *
4206 *==========================================================================*/
4207bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4208{
4209 if (request == NULL) {
4210 LOGE("Invalid request handle");
4211 assert(0);
4212 return false;
4213 }
4214
4215 if (!mForceHdrSnapshot) {
4216 CameraMetadata frame_settings;
4217 frame_settings = request->settings;
4218
4219 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4220 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4221 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4222 return false;
4223 }
4224 } else {
4225 return false;
4226 }
4227
4228 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4229 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4230 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4231 return false;
4232 }
4233 } else {
4234 return false;
4235 }
4236 }
4237
4238 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4239 if (request->output_buffers[i].stream->format
4240 == HAL_PIXEL_FORMAT_BLOB) {
4241 return true;
4242 }
4243 }
4244
4245 return false;
4246}
4247/*===========================================================================
4248 * FUNCTION : orchestrateRequest
4249 *
4250 * DESCRIPTION: Orchestrates a capture request from camera service
4251 *
4252 * PARAMETERS :
4253 * @request : request from framework to process
4254 *
4255 * RETURN : Error status codes
4256 *
4257 *==========================================================================*/
4258int32_t QCamera3HardwareInterface::orchestrateRequest(
4259 camera3_capture_request_t *request)
4260{
4261
4262 uint32_t originalFrameNumber = request->frame_number;
4263 uint32_t originalOutputCount = request->num_output_buffers;
4264 const camera_metadata_t *original_settings = request->settings;
4265 List<InternalRequest> internallyRequestedStreams;
4266 List<InternalRequest> emptyInternalList;
4267
4268 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4269 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4270 uint32_t internalFrameNumber;
4271 CameraMetadata modified_meta;
4272
4273
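        /*
         * The single framework HDR snapshot request is expanded into a
         * bracketed sequence of internal requests: metering-only "settling"
         * captures followed by full captures at GB_HDR_HALF_STEP_EV, 0 and
         * GB_HDR_2X_STEP_EV exposure compensation with AE locked. Internal
         * frame numbers are tracked in _orchestrationDb so that only the
         * framework-visible request's result is reported back.
         */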
4274 /* Add Blob channel to list of internally requested streams */
4275 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4276 if (request->output_buffers[i].stream->format
4277 == HAL_PIXEL_FORMAT_BLOB) {
4278 InternalRequest streamRequested;
4279 streamRequested.meteringOnly = 1;
4280 streamRequested.need_metadata = 0;
4281 streamRequested.stream = request->output_buffers[i].stream;
4282 internallyRequestedStreams.push_back(streamRequested);
4283 }
4284 }
4285 request->num_output_buffers = 0;
4286 auto itr = internallyRequestedStreams.begin();
4287
4288 /* Modify setting to set compensation */
4289 modified_meta = request->settings;
4290 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4291 uint8_t aeLock = 1;
4292 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4293 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4294 camera_metadata_t *modified_settings = modified_meta.release();
4295 request->settings = modified_settings;
4296
4297 /* Capture Settling & -2x frame */
4298 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4299 request->frame_number = internalFrameNumber;
4300 processCaptureRequest(request, internallyRequestedStreams);
4301
4302 request->num_output_buffers = originalOutputCount;
4303 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4304 request->frame_number = internalFrameNumber;
4305 processCaptureRequest(request, emptyInternalList);
4306 request->num_output_buffers = 0;
4307
4308 modified_meta = modified_settings;
4309 expCompensation = 0;
4310 aeLock = 1;
4311 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4312 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4313 modified_settings = modified_meta.release();
4314 request->settings = modified_settings;
4315
4316 /* Capture Settling & 0X frame */
4317
4318 itr = internallyRequestedStreams.begin();
4319 if (itr == internallyRequestedStreams.end()) {
4320 LOGE("Error Internally Requested Stream list is empty");
4321 assert(0);
4322 } else {
4323 itr->need_metadata = 0;
4324 itr->meteringOnly = 1;
4325 }
4326
4327 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4328 request->frame_number = internalFrameNumber;
4329 processCaptureRequest(request, internallyRequestedStreams);
4330
4331 itr = internallyRequestedStreams.begin();
4332 if (itr == internallyRequestedStreams.end()) {
4333 ALOGE("Error Internally Requested Stream list is empty");
4334 assert(0);
4335 } else {
4336 itr->need_metadata = 1;
4337 itr->meteringOnly = 0;
4338 }
4339
4340 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4341 request->frame_number = internalFrameNumber;
4342 processCaptureRequest(request, internallyRequestedStreams);
4343
4344 /* Capture 2X frame*/
4345 modified_meta = modified_settings;
4346 expCompensation = GB_HDR_2X_STEP_EV;
4347 aeLock = 1;
4348 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4349 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4350 modified_settings = modified_meta.release();
4351 request->settings = modified_settings;
4352
4353 itr = internallyRequestedStreams.begin();
4354 if (itr == internallyRequestedStreams.end()) {
4355 ALOGE("Error Internally Requested Stream list is empty");
4356 assert(0);
4357 } else {
4358 itr->need_metadata = 0;
4359 itr->meteringOnly = 1;
4360 }
4361 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4362 request->frame_number = internalFrameNumber;
4363 processCaptureRequest(request, internallyRequestedStreams);
4364
4365 itr = internallyRequestedStreams.begin();
4366 if (itr == internallyRequestedStreams.end()) {
4367 ALOGE("Error Internally Requested Stream list is empty");
4368 assert(0);
4369 } else {
4370 itr->need_metadata = 1;
4371 itr->meteringOnly = 0;
4372 }
4373
4374 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4375 request->frame_number = internalFrameNumber;
4376 processCaptureRequest(request, internallyRequestedStreams);
4377
4378
4379 /* Capture 2X on original streaming config*/
4380 internallyRequestedStreams.clear();
4381
4382 /* Restore original settings pointer */
4383 request->settings = original_settings;
4384 } else {
4385 uint32_t internalFrameNumber;
4386 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 return processCaptureRequest(request, internallyRequestedStreams);
4389 }
4390
4391 return NO_ERROR;
4392}
4393
4394/*===========================================================================
4395 * FUNCTION : orchestrateResult
4396 *
4397 * DESCRIPTION: Orchestrates a capture result to camera service
4398 *
4399 * PARAMETERS :
4400 * @request : request from framework to process
4401 *
4402 * RETURN :
4403 *
4404 *==========================================================================*/
4405void QCamera3HardwareInterface::orchestrateResult(
4406 camera3_capture_result_t *result)
4407{
4408 uint32_t frameworkFrameNumber;
4409 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4410 frameworkFrameNumber);
4411 if (rc != NO_ERROR) {
4412 LOGE("Cannot find translated frameworkFrameNumber");
4413 assert(0);
4414 } else {
4415 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004416 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004417 } else {
4418 result->frame_number = frameworkFrameNumber;
4419 mCallbackOps->process_capture_result(mCallbackOps, result);
4420 }
4421 }
4422}
4423
4424/*===========================================================================
4425 * FUNCTION : orchestrateNotify
4426 *
4427 * DESCRIPTION: Orchestrates a notify to camera service
4428 *
4429 * PARAMETERS :
4430 * @request : request from framework to process
4431 *
4432 * RETURN :
4433 *
4434 *==========================================================================*/
4435void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4436{
4437 uint32_t frameworkFrameNumber;
4438 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004439 int32_t rc = NO_ERROR;
4440
4441 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004442 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004443
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004445 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4446 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4447 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004448 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004449 LOGE("Cannot find translated frameworkFrameNumber");
4450 assert(0);
4451 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004452 }
4453 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004454
4455 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4456 LOGD("Internal Request drop the notifyCb");
4457 } else {
4458 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4459 mCallbackOps->notify(mCallbackOps, notify_msg);
4460 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004461}
4462
4463/*===========================================================================
4464 * FUNCTION : FrameNumberRegistry
4465 *
4466 * DESCRIPTION: Constructor
4467 *
4468 * PARAMETERS :
4469 *
4470 * RETURN :
4471 *
4472 *==========================================================================*/
4473FrameNumberRegistry::FrameNumberRegistry()
4474{
4475 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4476}
4477
4478/*===========================================================================
4479 * FUNCTION : ~FrameNumberRegistry
4480 *
4481 * DESCRIPTION: Destructor
4482 *
4483 * PARAMETERS :
4484 *
4485 * RETURN :
4486 *
4487 *==========================================================================*/
4488FrameNumberRegistry::~FrameNumberRegistry()
4489{
4490}
4491
4492/*===========================================================================
4493 * FUNCTION : PurgeOldEntriesLocked
4494 *
 4495 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4496 *
4497 * PARAMETERS :
4498 *
4499 * RETURN : NONE
4500 *
4501 *==========================================================================*/
4502void FrameNumberRegistry::purgeOldEntriesLocked()
4503{
4504 while (_register.begin() != _register.end()) {
4505 auto itr = _register.begin();
4506 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4507 _register.erase(itr);
4508 } else {
4509 return;
4510 }
4511 }
4512}
4513
4514/*===========================================================================
4515 * FUNCTION : allocStoreInternalFrameNumber
4516 *
4517 * DESCRIPTION: Method to note down a framework request and associate a new
4518 * internal request number against it
4519 *
4520 * PARAMETERS :
4521 * @fFrameNumber: Identifier given by framework
4522 * @internalFN : Output parameter which will have the newly generated internal
4523 * entry
4524 *
4525 * RETURN : Error code
4526 *
4527 *==========================================================================*/
4528int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4529 uint32_t &internalFrameNumber)
4530{
4531 Mutex::Autolock lock(mRegistryLock);
4532 internalFrameNumber = _nextFreeInternalNumber++;
4533 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4534 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4535 purgeOldEntriesLocked();
4536 return NO_ERROR;
4537}
4538
4539/*===========================================================================
4540 * FUNCTION : generateStoreInternalFrameNumber
4541 *
4542 * DESCRIPTION: Method to associate a new internal request number independent
 4543 * of any association with framework requests
4544 *
4545 * PARAMETERS :
 4546 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4547 *
4548 *
4549 * RETURN : Error code
4550 *
4551 *==========================================================================*/
4552int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4553{
4554 Mutex::Autolock lock(mRegistryLock);
4555 internalFrameNumber = _nextFreeInternalNumber++;
4556 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4557 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4558 purgeOldEntriesLocked();
4559 return NO_ERROR;
4560}
4561
4562/*===========================================================================
4563 * FUNCTION : getFrameworkFrameNumber
4564 *
 4565 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4566 *
4567 * PARAMETERS :
4568 * @internalFrame#: Internal reference
4569 * @frameworkframenumber: Output parameter holding framework frame entry
4570 *
4571 * RETURN : Error code
4572 *
4573 *==========================================================================*/
4574int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4575 uint32_t &frameworkFrameNumber)
4576{
4577 Mutex::Autolock lock(mRegistryLock);
4578 auto itr = _register.find(internalFrameNumber);
4579 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004580 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004581 return -ENOENT;
4582 }
4583
4584 frameworkFrameNumber = itr->second;
4585 purgeOldEntriesLocked();
4586 return NO_ERROR;
4587}
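/*
 * Illustrative usage sketch for FrameNumberRegistry (not part of the request
 * flow; shown only to make the internal/framework frame number mapping
 * explicit):
 *
 *   uint32_t internalFrameNumber;
 *   // framework-visible request
 *   _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number,
 *           internalFrameNumber);
 *   // internal-only request, mapped to EMPTY_FRAMEWORK_FRAME_NUMBER
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
 *
 *   uint32_t frameworkFrameNumber;
 *   if (_orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
 *           frameworkFrameNumber) == NO_ERROR &&
 *           frameworkFrameNumber != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // report the result/notify against frameworkFrameNumber
 *   }
 */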
Thierry Strudel3d639192016-09-09 11:52:26 -07004588
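/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fill an HDR+ client (pbcamera) stream configuration from the
 *              stream info of a channel's stream: dimensions, format,
 *              per-plane stride/scanline, and the padding implied by the
 *              frame length.
 *
 * PARAMETERS :
 *   @config         : pbcamera stream configuration to fill
 *   @pbStreamId     : stream id to assign to the configuration
 *   @pbStreamFormat : pbcamera pixel format of the stream
 *   @channel        : channel owning the stream
 *   @streamIndex    : index of the stream within the channel
 *
 * RETURN     : OK on success
 *              BAD_VALUE if config or channel is null
 *              NAME_NOT_FOUND if the stream or its info cannot be retrieved
 *
 *==========================================================================*/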
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004589status_t QCamera3HardwareInterface::fillPbStreamConfig(
4590 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4591 QCamera3Channel *channel, uint32_t streamIndex) {
4592 if (config == nullptr) {
4593 LOGE("%s: config is null", __FUNCTION__);
4594 return BAD_VALUE;
4595 }
4596
4597 if (channel == nullptr) {
4598 LOGE("%s: channel is null", __FUNCTION__);
4599 return BAD_VALUE;
4600 }
4601
4602 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4603 if (stream == nullptr) {
4604 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4605 return NAME_NOT_FOUND;
4606 }
4607
4608 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4609 if (streamInfo == nullptr) {
4610 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4611 return NAME_NOT_FOUND;
4612 }
4613
4614 config->id = pbStreamId;
4615 config->image.width = streamInfo->dim.width;
4616 config->image.height = streamInfo->dim.height;
4617 config->image.padding = 0;
4618 config->image.format = pbStreamFormat;
4619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004620 uint32_t totalPlaneSize = 0;
4621
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004622 // Fill plane information.
4623 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4624 pbcamera::PlaneConfiguration plane;
4625 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4626 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4627 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004628
4629 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004630 }
4631
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004633 return OK;
4634}
4635
Thierry Strudel3d639192016-09-09 11:52:26 -07004636/*===========================================================================
4637 * FUNCTION : processCaptureRequest
4638 *
4639 * DESCRIPTION: process a capture request from camera service
4640 *
4641 * PARAMETERS :
4642 * @request : request from framework to process
4643 *
4644 * RETURN :
4645 *
4646 *==========================================================================*/
4647int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004648 camera3_capture_request_t *request,
4649 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004650{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004651 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004652 int rc = NO_ERROR;
4653 int32_t request_id;
4654 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004655 bool isVidBufRequested = false;
4656 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004657 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004658
4659 pthread_mutex_lock(&mMutex);
4660
4661 // Validate current state
4662 switch (mState) {
4663 case CONFIGURED:
4664 case STARTED:
4665 /* valid state */
4666 break;
4667
4668 case ERROR:
4669 pthread_mutex_unlock(&mMutex);
4670 handleCameraDeviceError();
4671 return -ENODEV;
4672
4673 default:
4674 LOGE("Invalid state %d", mState);
4675 pthread_mutex_unlock(&mMutex);
4676 return -ENODEV;
4677 }
4678
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004679 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004680 if (rc != NO_ERROR) {
4681 LOGE("incoming request is not valid");
4682 pthread_mutex_unlock(&mMutex);
4683 return rc;
4684 }
4685
4686 meta = request->settings;
4687
4688 // For first capture request, send capture intent, and
4689 // stream on all streams
4690 if (mState == CONFIGURED) {
4691 // send an unconfigure to the backend so that the isp
4692 // resources are deallocated
4693 if (!mFirstConfiguration) {
4694 cam_stream_size_info_t stream_config_info;
4695 int32_t hal_version = CAM_HAL_V3;
4696 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4697 stream_config_info.buffer_info.min_buffers =
4698 MIN_INFLIGHT_REQUESTS;
4699 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004700 m_bIs4KVideo ? 0 :
4701 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004702 clear_metadata_buffer(mParameters);
4703 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4704 CAM_INTF_PARM_HAL_VERSION, hal_version);
4705 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4706 CAM_INTF_META_STREAM_INFO, stream_config_info);
4707 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4708 mParameters);
4709 if (rc < 0) {
4710 LOGE("set_parms for unconfigure failed");
4711 pthread_mutex_unlock(&mMutex);
4712 return rc;
4713 }
4714 }
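        // Hold the start-preview perf lock while streams are brought up; it is
        // released on error_exit below or once the first preview buffer is
        // returned to the framework.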
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004715 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004717 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004718 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004719 property_get("persist.camera.is_type", is_type_value, "4");
4720 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4721 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4722 property_get("persist.camera.is_type_preview", is_type_value, "4");
4723 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4724 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004725
4726 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4727 int32_t hal_version = CAM_HAL_V3;
4728 uint8_t captureIntent =
4729 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4730 mCaptureIntent = captureIntent;
4731 clear_metadata_buffer(mParameters);
4732 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4733 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4734 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004735 if (mFirstConfiguration) {
4736 // configure instant AEC
4737 // Instant AEC is a session based parameter and it is needed only
4738 // once per complete session after open camera.
4739 // i.e. This is set only once for the first capture request, after open camera.
4740 setInstantAEC(meta);
4741 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004742 uint8_t fwkVideoStabMode=0;
4743 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4744 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4745 }
4746
 4747        // Turn EIS on for video/preview only if the EIS setprop is enabled and the
 4748        // first capture request's settings enable video stabilization
4749 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4750 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004751 int32_t vsMode;
4752 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4754 rc = BAD_VALUE;
4755 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004756 LOGD("setEis %d", setEis);
4757 bool eis3Supported = false;
4758 size_t count = IS_TYPE_MAX;
4759 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4760 for (size_t i = 0; i < count; i++) {
4761 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4762 eis3Supported = true;
4763 break;
4764 }
4765 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004766
4767 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004768 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4770 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004771 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4772 is_type = isTypePreview;
4773 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4774 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4775 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004777 } else {
4778 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004779 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004780 } else {
4781 is_type = IS_TYPE_NONE;
4782 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004784 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4786 }
4787 }
4788
4789 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4790 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4791
Thierry Strudel54dc9782017-02-15 12:12:10 -08004792 //Disable tintless only if the property is set to 0
4793 memset(prop, 0, sizeof(prop));
4794 property_get("persist.camera.tintless.enable", prop, "1");
4795 int32_t tintless_value = atoi(prop);
4796
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4798 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004799
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 //Disable CDS for HFR mode or if DIS/EIS is on.
4801 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4802 //after every configure_stream
4803 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4804 (m_bIsVideo)) {
4805 int32_t cds = CAM_CDS_MODE_OFF;
4806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4807 CAM_INTF_PARM_CDS_MODE, cds))
4808 LOGE("Failed to disable CDS for HFR mode");
4809
4810 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004811
4812 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4813 uint8_t* use_av_timer = NULL;
4814
4815 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004816 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 use_av_timer = &m_debug_avtimer;
4818 }
4819 else{
4820 use_av_timer =
4821 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004822 if (use_av_timer) {
4823 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4824 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004825 }
4826
4827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4828 rc = BAD_VALUE;
4829 }
4830 }
4831
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 setMobicat();
4833
 4834        /* Set fps and HFR mode while sending meta stream info so that the sensor
 4835         * can configure the appropriate streaming mode */
4836 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4838 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4840 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 if (rc == NO_ERROR) {
4842 int32_t max_fps =
4843 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004844 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4846 }
4847 /* For HFR, more buffers are dequeued upfront to improve the performance */
4848 if (mBatchSize) {
4849 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4850 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4851 }
4852 }
4853 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 LOGE("setHalFpsRange failed");
4855 }
4856 }
4857 if (meta.exists(ANDROID_CONTROL_MODE)) {
4858 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4859 rc = extractSceneMode(meta, metaMode, mParameters);
4860 if (rc != NO_ERROR) {
4861 LOGE("extractSceneMode failed");
4862 }
4863 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004864 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004865
Thierry Strudel04e026f2016-10-10 11:27:36 -07004866 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4867 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4868 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4869 rc = setVideoHdrMode(mParameters, vhdr);
4870 if (rc != NO_ERROR) {
4871 LOGE("setVideoHDR is failed");
4872 }
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 //TODO: validate the arguments, HSV scenemode should have only the
4876 //advertised fps ranges
4877
 4878        /* Set the capture intent, HAL version, tintless, stream info,
 4879         * and DIS enable parameters to the backend */
4880 LOGD("set_parms META_STREAM_INFO " );
4881 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004882 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4883 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 mStreamConfigInfo.type[i],
4885 mStreamConfigInfo.stream_sizes[i].width,
4886 mStreamConfigInfo.stream_sizes[i].height,
4887 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 mStreamConfigInfo.format[i],
4889 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004890 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4893 mParameters);
4894 if (rc < 0) {
4895 LOGE("set_parms failed for hal version, stream info");
4896 }
4897
Chien-Yu Chenee335912017-02-09 17:53:20 -08004898 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4899 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 if (rc != NO_ERROR) {
4901 LOGE("Failed to get sensor output size");
4902 pthread_mutex_unlock(&mMutex);
4903 goto error_exit;
4904 }
4905
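        // Refresh the crop region mapper so that crop regions can be translated
        // between the full active pixel array and the active array reported for
        // the current sensor mode.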
4906 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4907 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004908 mSensorModeInfo.active_array_size.width,
4909 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004911 if (gHdrPlusClient != nullptr) {
4912 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4913 if (rc != OK) {
4914 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4915 mCameraId, mSensorModeInfo.op_pixel_clk);
4916 pthread_mutex_unlock(&mMutex);
4917 goto error_exit;
4918 }
4919 }
4920
Thierry Strudel3d639192016-09-09 11:52:26 -07004921        /* Set batch mode before initializing channels. Since registerBuffer
 4922         * internally initializes some of the channels, batch mode must be set
 4923         * even before the first registerBuffer call */
4924 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4925 it != mStreamInfo.end(); it++) {
4926 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4927 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4928 && mBatchSize) {
4929 rc = channel->setBatchSize(mBatchSize);
4930 //Disable per frame map unmap for HFR/batchmode case
4931 rc |= channel->setPerFrameMapUnmap(false);
4932 if (NO_ERROR != rc) {
4933 LOGE("Channel init failed %d", rc);
4934 pthread_mutex_unlock(&mMutex);
4935 goto error_exit;
4936 }
4937 }
4938 }
4939
4940 //First initialize all streams
4941 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4942 it != mStreamInfo.end(); it++) {
4943 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4944 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4945 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 setEis) {
4947 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4948 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4949 is_type = mStreamConfigInfo.is_type[i];
4950 break;
4951 }
4952 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004954 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 rc = channel->initialize(IS_TYPE_NONE);
4956 }
4957 if (NO_ERROR != rc) {
4958 LOGE("Channel initialization failed %d", rc);
4959 pthread_mutex_unlock(&mMutex);
4960 goto error_exit;
4961 }
4962 }
4963
4964 if (mRawDumpChannel) {
4965 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4966 if (rc != NO_ERROR) {
4967 LOGE("Error: Raw Dump Channel init failed");
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
4971 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004972 if (mHdrPlusRawSrcChannel) {
4973 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4974 if (rc != NO_ERROR) {
4975 LOGE("Error: HDR+ RAW Source Channel init failed");
4976 pthread_mutex_unlock(&mMutex);
4977 goto error_exit;
4978 }
4979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 if (mSupportChannel) {
4981 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4982 if (rc < 0) {
4983 LOGE("Support channel initialization failed");
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
4988 if (mAnalysisChannel) {
4989 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4990 if (rc < 0) {
4991 LOGE("Analysis channel initialization failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
4995 }
4996 if (mDummyBatchChannel) {
4997 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4998 if (rc < 0) {
4999 LOGE("mDummyBatchChannel setBatchSize failed");
5000 pthread_mutex_unlock(&mMutex);
5001 goto error_exit;
5002 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005003 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (rc < 0) {
5005 LOGE("mDummyBatchChannel initialization failed");
5006 pthread_mutex_unlock(&mMutex);
5007 goto error_exit;
5008 }
5009 }
5010
5011 // Set bundle info
5012 rc = setBundleInfo();
5013 if (rc < 0) {
5014 LOGE("setBundleInfo failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018
5019 //update settings from app here
5020 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5021 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5022 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5023 }
5024 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5025 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5026 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5027 }
5028 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5029 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5030 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5031
5032 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5033 (mLinkedCameraId != mCameraId) ) {
5034 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5035 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 goto error_exit;
5038 }
5039 }
5040
5041 // add bundle related cameras
5042 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5043 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005044 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5045 &m_pDualCamCmdPtr->bundle_info;
5046 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 if (mIsDeviceLinked)
5048 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5049 else
5050 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5051
5052 pthread_mutex_lock(&gCamLock);
5053
5054 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5055 LOGE("Dualcam: Invalid Session Id ");
5056 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 goto error_exit;
5059 }
5060
5061 if (mIsMainCamera == 1) {
5062 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5063 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005064 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005065 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 // related session id should be session id of linked session
5067 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5068 } else {
5069 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5070 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005071 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005072 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5074 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005075 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 pthread_mutex_unlock(&gCamLock);
5077
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005078 rc = mCameraHandle->ops->set_dual_cam_cmd(
5079 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 if (rc < 0) {
5081 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005083 goto error_exit;
5084 }
5085 }
5086
5087 //Then start them.
5088 LOGH("Start META Channel");
5089 rc = mMetadataChannel->start();
5090 if (rc < 0) {
5091 LOGE("META channel start failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095
5096 if (mAnalysisChannel) {
5097 rc = mAnalysisChannel->start();
5098 if (rc < 0) {
5099 LOGE("Analysis channel start failed");
5100 mMetadataChannel->stop();
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105
5106 if (mSupportChannel) {
5107 rc = mSupportChannel->start();
5108 if (rc < 0) {
5109 LOGE("Support channel start failed");
5110 mMetadataChannel->stop();
5111 /* Although support and analysis are mutually exclusive today
 5112            adding it in any case for future proofing */
5113 if (mAnalysisChannel) {
5114 mAnalysisChannel->stop();
5115 }
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
5120 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5121 it != mStreamInfo.end(); it++) {
5122 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5123 LOGH("Start Processing Channel mask=%d",
5124 channel->getStreamTypeMask());
5125 rc = channel->start();
5126 if (rc < 0) {
5127 LOGE("channel start failed");
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 if (mRawDumpChannel) {
5134 LOGD("Starting raw dump stream");
5135 rc = mRawDumpChannel->start();
5136 if (rc != NO_ERROR) {
5137 LOGE("Error Starting Raw Dump Channel");
5138 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5139 it != mStreamInfo.end(); it++) {
5140 QCamera3Channel *channel =
5141 (QCamera3Channel *)(*it)->stream->priv;
5142 LOGH("Stopping Processing Channel mask=%d",
5143 channel->getStreamTypeMask());
5144 channel->stop();
5145 }
5146 if (mSupportChannel)
5147 mSupportChannel->stop();
5148 if (mAnalysisChannel) {
5149 mAnalysisChannel->stop();
5150 }
5151 mMetadataChannel->stop();
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156
5157 if (mChannelHandle) {
5158
5159 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5160 mChannelHandle);
5161 if (rc != NO_ERROR) {
5162 LOGE("start_channel failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166 }
5167
5168 goto no_error;
5169error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005170 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 return rc;
5172no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 mWokenUpByDaemon = false;
5174 mPendingLiveRequest = 0;
5175 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 }
5177
Chien-Yu Chenee335912017-02-09 17:53:20 -08005178 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005179 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005180 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5181 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5182 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5183 rc = enableHdrPlusModeLocked();
5184 if (rc != OK) {
5185 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5186 pthread_mutex_unlock(&mMutex);
5187 return rc;
5188 }
5189
5190 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5191 if (mHdrPlusRawSrcChannel) {
5192 rc = mHdrPlusRawSrcChannel->start();
5193 if (rc != OK) {
5194 LOGE("Error Starting HDR+ RAW Channel");
5195 pthread_mutex_unlock(&mMutex);
5196 return rc;
5197 }
5198 }
5199 mFirstPreviewIntentSeen = true;
5200 }
5201
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005203 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204
5205 if (mFlushPerf) {
5206 //we cannot accept any requests during flush
5207 LOGE("process_capture_request cannot proceed during flush");
5208 pthread_mutex_unlock(&mMutex);
5209 return NO_ERROR; //should return an error
5210 }
5211
5212 if (meta.exists(ANDROID_REQUEST_ID)) {
5213 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5214 mCurrentRequestId = request_id;
5215 LOGD("Received request with id: %d", request_id);
5216 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5217 LOGE("Unable to find request id field, \
5218 & no previous id available");
5219 pthread_mutex_unlock(&mMutex);
5220 return NAME_NOT_FOUND;
5221 } else {
5222 LOGD("Re-using old request id");
5223 request_id = mCurrentRequestId;
5224 }
5225
5226 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5227 request->num_output_buffers,
5228 request->input_buffer,
5229 frameNumber);
5230 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005233 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 uint32_t snapshotStreamId = 0;
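    // For each output buffer: flag blob (JPEG) requests, wait on and close any
    // acquire fence, skip depth blob buffers (only noted via depthRequestPresent),
    // and add the owning channel's stream ID to the request's stream array.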
5235 for (size_t i = 0; i < request->num_output_buffers; i++) {
5236 const camera3_stream_buffer_t& output = request->output_buffers[i];
5237 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5238
Emilian Peev7650c122017-01-19 08:24:33 -08005239 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5240 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005241 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 blob_request = 1;
5243 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5244 }
5245
5246 if (output.acquire_fence != -1) {
5247 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5248 close(output.acquire_fence);
5249 if (rc != OK) {
5250 LOGE("sync wait failed %d", rc);
5251 pthread_mutex_unlock(&mMutex);
5252 return rc;
5253 }
5254 }
5255
Emilian Peev0f3c3162017-03-15 12:57:46 +00005256 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5257 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005258 depthRequestPresent = true;
5259 continue;
5260 }
5261
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005264
5265 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5266 isVidBufRequested = true;
5267 }
5268 }
5269
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005270    //FIXME: Add checks to ensure there are no dups in validateCaptureRequest
5271 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5272 itr++) {
5273 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5274 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5275 channel->getStreamID(channel->getStreamTypeMask());
5276
5277 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5278 isVidBufRequested = true;
5279 }
5280 }
5281
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005284 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 }
5286 if (blob_request && mRawDumpChannel) {
5287 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005288 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005289 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 }
5292
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293 {
5294 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5295 // Request a RAW buffer if
5296 // 1. mHdrPlusRawSrcChannel is valid.
5297 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5298 // 3. There is no pending HDR+ request.
5299 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5300 mHdrPlusPendingRequests.size() == 0) {
5301 streamsArray.stream_request[streamsArray.num_streams].streamID =
5302 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5303 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5304 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005305 }
5306
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005307 //extract capture intent
5308 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5309 mCaptureIntent =
5310 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5311 }
5312
5313 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5314 mCacMode =
5315 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5316 }
5317
5318 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005319 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005321 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005322 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005323 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5324 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005325 }
5326
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005327 if (hdrPlusRequest) {
5328 // For a HDR+ request, just set the frame parameters.
5329 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5330 if (rc < 0) {
5331 LOGE("fail to set frame parameters");
5332 pthread_mutex_unlock(&mMutex);
5333 return rc;
5334 }
5335 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 /* Parse the settings:
5337 * - For every request in NORMAL MODE
5338 * - For every request in HFR mode during preview only case
5339 * - For first request of every batch in HFR mode during video
5340 * recording. In batchmode the same settings except frame number is
5341 * repeated in each request of the batch.
5342 */
5343 if (!mBatchSize ||
5344 (mBatchSize && !isVidBufRequested) ||
5345 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 if (rc < 0) {
5348 LOGE("fail to set frame parameters");
5349 pthread_mutex_unlock(&mMutex);
5350 return rc;
5351 }
5352 }
5353        /* For batch mode HFR, setFrameParameters is not called for every
5354         * request; only the frame number of the latest request is parsed.
5355         * Keep track of the first and last frame numbers in a batch so that
5356         * metadata for all frame numbers of the batch can be duplicated in
5357         * handleBatchMetadata */
5358 if (mBatchSize) {
5359 if (!mToBeQueuedVidBufs) {
5360 //start of the batch
5361 mFirstFrameNumberInBatch = request->frame_number;
5362 }
5363 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5364 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5365 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005366 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 return BAD_VALUE;
5368 }
5369 }
5370 if (mNeedSensorRestart) {
5371 /* Unlock the mutex as restartSensor waits on the channels to be
5372 * stopped, which in turn calls stream callback functions -
5373 * handleBufferWithLock and handleMetadataWithLock */
5374 pthread_mutex_unlock(&mMutex);
5375 rc = dynamicUpdateMetaStreamInfo();
5376 if (rc != NO_ERROR) {
5377 LOGE("Restarting the sensor failed");
5378 return BAD_VALUE;
5379 }
5380 mNeedSensorRestart = false;
5381 pthread_mutex_lock(&mMutex);
5382 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005383 if(mResetInstantAEC) {
5384 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5385 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5386 mResetInstantAEC = false;
5387 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005388 } else {
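        // Reprocess request: wait for the input buffer's acquire fence to signal
        // before its contents can be consumed.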
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 if (request->input_buffer->acquire_fence != -1) {
5390 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5391 close(request->input_buffer->acquire_fence);
5392 if (rc != OK) {
5393 LOGE("input buffer sync wait failed %d", rc);
5394 pthread_mutex_unlock(&mMutex);
5395 return rc;
5396 }
5397 }
5398 }
5399
5400 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5401 mLastCustIntentFrmNum = frameNumber;
5402 }
5403 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005404 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 pendingRequestIterator latestRequest;
5406 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005407 pendingRequest.num_buffers = depthRequestPresent ?
5408 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 pendingRequest.request_id = request_id;
5410 pendingRequest.blob_request = blob_request;
5411 pendingRequest.timestamp = 0;
5412 pendingRequest.bUrgentReceived = 0;
5413 if (request->input_buffer) {
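        // Deep-copy the framework's input (reprocess) buffer descriptor so it remains
        // valid after process_capture_request returns.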
5414 pendingRequest.input_buffer =
5415 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5416 *(pendingRequest.input_buffer) = *(request->input_buffer);
5417 pInputBuffer = pendingRequest.input_buffer;
5418 } else {
5419 pendingRequest.input_buffer = NULL;
5420 pInputBuffer = NULL;
5421 }
5422
5423 pendingRequest.pipeline_depth = 0;
5424 pendingRequest.partial_result_cnt = 0;
5425 extractJpegMetadata(mCurJpegMeta, request);
5426 pendingRequest.jpegMetadata = mCurJpegMeta;
5427 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5428 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005430 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5431 mHybridAeEnable =
5432 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5433 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005434
5435 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5436 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005437 /* DevCamDebug metadata processCaptureRequest */
5438 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5439 mDevCamDebugMetaEnable =
5440 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5441 }
5442 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5443 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005444
5445 //extract CAC info
5446 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5447 mCacMode =
5448 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5449 }
5450 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005451 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452
5453 PendingBuffersInRequest bufsForCurRequest;
5454 bufsForCurRequest.frame_number = frameNumber;
5455 // Mark current timestamp for the new request
5456 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005458
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005459 if (hdrPlusRequest) {
5460 // Save settings for this request.
5461 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5462 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5463
5464 // Add to pending HDR+ request queue.
5465 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5466 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5467
5468 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5469 }
5470
Thierry Strudel3d639192016-09-09 11:52:26 -07005471 for (size_t i = 0; i < request->num_output_buffers; i++) {
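        // Depth (PDAF) blob buffers are excluded from the regular pending-buffer
        // bookkeeping; they are handled via the depth channel's mapBuffer() below.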
Emilian Peev0f3c3162017-03-15 12:57:46 +00005472 if ((request->output_buffers[i].stream->data_space ==
5473 HAL_DATASPACE_DEPTH) &&
5474 (HAL_PIXEL_FORMAT_BLOB ==
5475 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005476 continue;
5477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005478 RequestedBufferInfo requestedBuf;
5479 memset(&requestedBuf, 0, sizeof(requestedBuf));
5480 requestedBuf.stream = request->output_buffers[i].stream;
5481 requestedBuf.buffer = NULL;
5482 pendingRequest.buffers.push_back(requestedBuf);
5483
5484 // Add to buffer handle the pending buffers list
5485 PendingBufferInfo bufferInfo;
5486 bufferInfo.buffer = request->output_buffers[i].buffer;
5487 bufferInfo.stream = request->output_buffers[i].stream;
5488 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5489 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5490 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5491 frameNumber, bufferInfo.buffer,
5492 channel->getStreamTypeMask(), bufferInfo.stream->format);
5493 }
5494 // Add this request packet into mPendingBuffersMap
5495 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5496 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5497 mPendingBuffersMap.get_num_overall_buffers());
5498
5499 latestRequest = mPendingRequestsList.insert(
5500 mPendingRequestsList.end(), pendingRequest);
5501 if(mFlush) {
5502 LOGI("mFlush is true");
5503 pthread_mutex_unlock(&mMutex);
5504 return NO_ERROR;
5505 }
5506
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005507 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5508 // channel.
5509 if (!hdrPlusRequest) {
5510 int indexUsed;
5511 // Notify metadata channel we receive a request
5512 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005513
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005514 if(request->input_buffer != NULL){
5515 LOGD("Input request, frame_number %d", frameNumber);
5516 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5517 if (NO_ERROR != rc) {
5518 LOGE("fail to set reproc parameters");
5519 pthread_mutex_unlock(&mMutex);
5520 return rc;
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 }
5523
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005524 // Call request on other streams
5525 uint32_t streams_need_metadata = 0;
5526 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5527 for (size_t i = 0; i < request->num_output_buffers; i++) {
5528 const camera3_stream_buffer_t& output = request->output_buffers[i];
5529 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5530
5531 if (channel == NULL) {
5532 LOGW("invalid channel pointer for stream");
5533 continue;
5534 }
5535
5536 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5537 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5538 output.buffer, request->input_buffer, frameNumber);
5539 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005540 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5542 if (rc < 0) {
5543 LOGE("Fail to request on picture channel");
5544 pthread_mutex_unlock(&mMutex);
5545 return rc;
5546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005548 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5549 assert(NULL != mDepthChannel);
5550 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551
Emilian Peev7650c122017-01-19 08:24:33 -08005552 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5553 if (rc < 0) {
5554 LOGE("Fail to map on depth buffer");
5555 pthread_mutex_unlock(&mMutex);
5556 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 }
Emilian Peev7650c122017-01-19 08:24:33 -08005558 } else {
5559 LOGD("snapshot request with buffer %p, frame_number %d",
5560 output.buffer, frameNumber);
5561 if (!request->settings) {
5562 rc = channel->request(output.buffer, frameNumber,
5563 NULL, mPrevParameters, indexUsed);
5564 } else {
5565 rc = channel->request(output.buffer, frameNumber,
5566 NULL, mParameters, indexUsed);
5567 }
5568 if (rc < 0) {
5569 LOGE("Fail to request on picture channel");
5570 pthread_mutex_unlock(&mMutex);
5571 return rc;
5572 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573
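                        // Record in streamsArray which buffer index the backend should
                        // use for this stream; constrained high-speed sessions free-run
                        // instead of using a fixed index.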
Emilian Peev7650c122017-01-19 08:24:33 -08005574 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5575 uint32_t j = 0;
5576 for (j = 0; j < streamsArray.num_streams; j++) {
5577 if (streamsArray.stream_request[j].streamID == streamId) {
5578 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5579 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5580 else
5581 streamsArray.stream_request[j].buf_index = indexUsed;
5582 break;
5583 }
5584 }
5585 if (j == streamsArray.num_streams) {
5586 LOGE("Did not find matching stream to update index");
5587 assert(0);
5588 }
5589
5590 pendingBufferIter->need_metadata = true;
5591 streams_need_metadata++;
5592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005593 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005594 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5595 bool needMetadata = false;
5596 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5597 rc = yuvChannel->request(output.buffer, frameNumber,
5598 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5599 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 pthread_mutex_unlock(&mMutex);
5603 return rc;
5604 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005605
5606 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5607 uint32_t j = 0;
5608 for (j = 0; j < streamsArray.num_streams; j++) {
5609 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5611 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5612 else
5613 streamsArray.stream_request[j].buf_index = indexUsed;
5614 break;
5615 }
5616 }
5617 if (j == streamsArray.num_streams) {
5618 LOGE("Did not find matching stream to update index");
5619 assert(0);
5620 }
5621
5622 pendingBufferIter->need_metadata = needMetadata;
5623 if (needMetadata)
5624 streams_need_metadata += 1;
5625 LOGD("calling YUV channel request, need_metadata is %d",
5626 needMetadata);
5627 } else {
5628 LOGD("request with buffer %p, frame_number %d",
5629 output.buffer, frameNumber);
5630
5631 rc = channel->request(output.buffer, frameNumber, indexUsed);
5632
5633 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5634 uint32_t j = 0;
5635 for (j = 0; j < streamsArray.num_streams; j++) {
5636 if (streamsArray.stream_request[j].streamID == streamId) {
5637 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5638 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5639 else
5640 streamsArray.stream_request[j].buf_index = indexUsed;
5641 break;
5642 }
5643 }
5644 if (j == streamsArray.num_streams) {
5645 LOGE("Did not find matching stream to update index");
5646 assert(0);
5647 }
5648
5649 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5650 && mBatchSize) {
5651 mToBeQueuedVidBufs++;
5652 if (mToBeQueuedVidBufs == mBatchSize) {
5653 channel->queueBatchBuf();
5654 }
5655 }
5656 if (rc < 0) {
5657 LOGE("request failed");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
5660 }
5661 }
5662 pendingBufferIter++;
5663 }
5664
5665 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5666 itr++) {
5667 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5668
5669 if (channel == NULL) {
5670 LOGE("invalid channel pointer for stream");
5671 assert(0);
5672 return BAD_VALUE;
5673 }
5674
5675 InternalRequest requestedStream;
5676 requestedStream = (*itr);
5677
5678
5679 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5680 LOGD("snapshot request internally input buffer %p, frame_number %d",
5681 request->input_buffer, frameNumber);
5682 if(request->input_buffer != NULL){
5683 rc = channel->request(NULL, frameNumber,
5684 pInputBuffer, &mReprocMeta, indexUsed, true,
5685 requestedStream.meteringOnly);
5686 if (rc < 0) {
5687 LOGE("Fail to request on picture channel");
5688 pthread_mutex_unlock(&mMutex);
5689 return rc;
5690 }
5691 } else {
5692 LOGD("snapshot request with frame_number %d", frameNumber);
5693 if (!request->settings) {
5694 rc = channel->request(NULL, frameNumber,
5695 NULL, mPrevParameters, indexUsed, true,
5696 requestedStream.meteringOnly);
5697 } else {
5698 rc = channel->request(NULL, frameNumber,
5699 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5700 }
5701 if (rc < 0) {
5702 LOGE("Fail to request on picture channel");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
5706
5707 if ((*itr).meteringOnly != 1) {
5708 requestedStream.need_metadata = 1;
5709 streams_need_metadata++;
5710 }
5711 }
5712
5713 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5714 uint32_t j = 0;
5715 for (j = 0; j < streamsArray.num_streams; j++) {
5716 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005717 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5718 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5719 else
5720 streamsArray.stream_request[j].buf_index = indexUsed;
5721 break;
5722 }
5723 }
5724 if (j == streamsArray.num_streams) {
5725 LOGE("Did not find matching stream to update index");
5726 assert(0);
5727 }
5728
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005729 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005731 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005732 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005734 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005735 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005736
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005737 //If 2 streams have need_metadata set to true, fail the request, unless
5738 //we copy/reference count the metadata buffer
5739 if (streams_need_metadata > 1) {
5740            LOGE("not supporting requests in which two streams require"
5741                    " HAL metadata for reprocessing");
5742 pthread_mutex_unlock(&mMutex);
5743 return -EINVAL;
5744 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005745
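    // Request PDAF data from the backend only when a depth stream buffer is part of
    // this capture request.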
Emilian Peev7650c122017-01-19 08:24:33 -08005746 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5748 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5749 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5750 pthread_mutex_unlock(&mMutex);
5751 return BAD_VALUE;
5752 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 if (request->input_buffer == NULL) {
5754 /* Set the parameters to backend:
5755 * - For every request in NORMAL MODE
5756 * - For every request in HFR mode during preview only case
5757 * - Once every batch in HFR mode during video recording
5758 */
5759 if (!mBatchSize ||
5760 (mBatchSize && !isVidBufRequested) ||
5761 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5762 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5763 mBatchSize, isVidBufRequested,
5764 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765
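            // Last request of an HFR batch: merge the streams accumulated from earlier
            // requests in the batch so that a single set_parms call covers the whole batch.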
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005766 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5767 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5768 uint32_t m = 0;
5769 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5770 if (streamsArray.stream_request[k].streamID ==
5771 mBatchedStreamsArray.stream_request[m].streamID)
5772 break;
5773 }
5774 if (m == mBatchedStreamsArray.num_streams) {
5775 mBatchedStreamsArray.stream_request\
5776 [mBatchedStreamsArray.num_streams].streamID =
5777 streamsArray.stream_request[k].streamID;
5778 mBatchedStreamsArray.stream_request\
5779 [mBatchedStreamsArray.num_streams].buf_index =
5780 streamsArray.stream_request[k].buf_index;
5781 mBatchedStreamsArray.num_streams =
5782 mBatchedStreamsArray.num_streams + 1;
5783 }
5784 }
5785 streamsArray = mBatchedStreamsArray;
5786 }
5787 /* Update stream id of all the requested buffers */
5788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5789 streamsArray)) {
5790 LOGE("Failed to set stream type mask in the parameters");
5791 return BAD_VALUE;
5792 }
5793
5794 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5795 mParameters);
5796 if (rc < 0) {
5797 LOGE("set_parms failed");
5798 }
5799            /* reset to zero because the batch has been queued */
5800 mToBeQueuedVidBufs = 0;
5801 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5802 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5803 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005804 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5805 uint32_t m = 0;
5806 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5807 if (streamsArray.stream_request[k].streamID ==
5808 mBatchedStreamsArray.stream_request[m].streamID)
5809 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005810 }
5811 if (m == mBatchedStreamsArray.num_streams) {
5812 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5813 streamID = streamsArray.stream_request[k].streamID;
5814 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5815 buf_index = streamsArray.stream_request[k].buf_index;
5816 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5817 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005818 }
5819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005821 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005822 }
5823
5824 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5825
5826 mState = STARTED;
5827    // Use a timed condition wait so this call cannot block indefinitely
5828 struct timespec ts;
5829 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005830 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005831 if (rc < 0) {
5832 isValidTimeout = 0;
5833        LOGE("Error reading the monotonic clock!!");
5834 }
5835 else {
5836        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 int64_t timeout = 5;
5838 {
5839 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5840 // If there is a pending HDR+ request, the following requests may be blocked until the
5841 // HDR+ request is done. So allow a longer timeout.
5842 if (mHdrPlusPendingRequests.size() > 0) {
5843 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5844 }
5845 }
5846 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005847 }
5848 //Block on conditional variable
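    // Throttle the caller: wait until the number of in-flight requests drops below
    // mMinInFlightRequests (or the timeout expires). Reprocess requests are not throttled.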
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005849 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005850 (mState != ERROR) && (mState != DEINIT)) {
5851 if (!isValidTimeout) {
5852 LOGD("Blocking on conditional wait");
5853 pthread_cond_wait(&mRequestCond, &mMutex);
5854 }
5855 else {
5856 LOGD("Blocking on timed conditional wait");
5857 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5858 if (rc == ETIMEDOUT) {
5859 rc = -ENODEV;
5860 LOGE("Unblocked on timeout!!!!");
5861 break;
5862 }
5863 }
5864 LOGD("Unblocked");
5865 if (mWokenUpByDaemon) {
5866 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005867 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005868 break;
5869 }
5870 }
5871 pthread_mutex_unlock(&mMutex);
5872
5873 return rc;
5874}
5875
5876/*===========================================================================
5877 * FUNCTION : dump
5878 *
5879 * DESCRIPTION: Dump the HAL's pending request and pending buffer state to the
5880 *              given file descriptor (triggered by dumpsys media.camera)
5881 * PARAMETERS :
5882 *   @fd : file descriptor to write the dump to
5883 *
5884 * RETURN     : None
5885 *==========================================================================*/
5886void QCamera3HardwareInterface::dump(int fd)
5887{
5888 pthread_mutex_lock(&mMutex);
5889 dprintf(fd, "\n Camera HAL3 information Begin \n");
5890
5891 dprintf(fd, "\nNumber of pending requests: %zu \n",
5892 mPendingRequestsList.size());
5893 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5894 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5895 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5896 for(pendingRequestIterator i = mPendingRequestsList.begin();
5897 i != mPendingRequestsList.end(); i++) {
5898 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5899 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5900 i->input_buffer);
5901 }
5902 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5903 mPendingBuffersMap.get_num_overall_buffers());
5904 dprintf(fd, "-------+------------------\n");
5905 dprintf(fd, " Frame | Stream type mask \n");
5906 dprintf(fd, "-------+------------------\n");
5907 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5908 for(auto &j : req.mPendingBufferList) {
5909 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5910 dprintf(fd, " %5d | %11d \n",
5911 req.frame_number, channel->getStreamTypeMask());
5912 }
5913 }
5914 dprintf(fd, "-------+------------------\n");
5915
5916 dprintf(fd, "\nPending frame drop list: %zu\n",
5917 mPendingFrameDropList.size());
5918 dprintf(fd, "-------+-----------\n");
5919 dprintf(fd, " Frame | Stream ID \n");
5920 dprintf(fd, "-------+-----------\n");
5921 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5922 i != mPendingFrameDropList.end(); i++) {
5923 dprintf(fd, " %5d | %9d \n",
5924 i->frame_number, i->stream_ID);
5925 }
5926 dprintf(fd, "-------+-----------\n");
5927
5928 dprintf(fd, "\n Camera HAL3 information End \n");
5929
5930 /* use dumpsys media.camera as trigger to send update debug level event */
5931 mUpdateDebugLevel = true;
5932 pthread_mutex_unlock(&mMutex);
5933 return;
5934}
5935
5936/*===========================================================================
5937 * FUNCTION : flush
5938 *
5939 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5940 * conditionally restarts channels
5941 *
5942 * PARAMETERS :
5943 * @ restartChannels: re-start all channels
5944 *
5945 *
5946 * RETURN :
5947 * 0 on success
5948 * Error code on failure
5949 *==========================================================================*/
5950int QCamera3HardwareInterface::flush(bool restartChannels)
5951{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005952 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005953 int32_t rc = NO_ERROR;
5954
5955 LOGD("Unblocking Process Capture Request");
5956 pthread_mutex_lock(&mMutex);
5957 mFlush = true;
5958 pthread_mutex_unlock(&mMutex);
5959
5960 rc = stopAllChannels();
5961    // Unlink the dual-camera bundle if this device is linked
5962 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005963 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5964 &m_pDualCamCmdPtr->bundle_info;
5965 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005966 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5967 pthread_mutex_lock(&gCamLock);
5968
5969 if (mIsMainCamera == 1) {
5970 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5971 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005972 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005973 // related session id should be session id of linked session
5974 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5975 } else {
5976 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5977 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005978 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005979 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5980 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005981 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005982 pthread_mutex_unlock(&gCamLock);
5983
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005984 rc = mCameraHandle->ops->set_dual_cam_cmd(
5985 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005986 if (rc < 0) {
5987 LOGE("Dualcam: Unlink failed, but still proceed to close");
5988 }
5989 }
5990
5991 if (rc < 0) {
5992 LOGE("stopAllChannels failed");
5993 return rc;
5994 }
5995 if (mChannelHandle) {
5996 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5997 mChannelHandle);
5998 }
5999
6000 // Reset bundle info
6001 rc = setBundleInfo();
6002 if (rc < 0) {
6003 LOGE("setBundleInfo failed %d", rc);
6004 return rc;
6005 }
6006
6007 // Mutex Lock
6008 pthread_mutex_lock(&mMutex);
6009
6010 // Unblock process_capture_request
6011 mPendingLiveRequest = 0;
6012 pthread_cond_signal(&mRequestCond);
6013
6014 rc = notifyErrorForPendingRequests();
6015 if (rc < 0) {
6016 LOGE("notifyErrorForPendingRequests failed");
6017 pthread_mutex_unlock(&mMutex);
6018 return rc;
6019 }
6020
6021 mFlush = false;
6022
6023 // Start the Streams/Channels
6024 if (restartChannels) {
6025 rc = startAllChannels();
6026 if (rc < 0) {
6027 LOGE("startAllChannels failed");
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006031 if (mChannelHandle) {
6032 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6033 mChannelHandle);
6034 if (rc < 0) {
6035 LOGE("start_channel failed");
6036 pthread_mutex_unlock(&mMutex);
6037 return rc;
6038 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 }
6040 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 pthread_mutex_unlock(&mMutex);
6042
6043 return 0;
6044}
6045
6046/*===========================================================================
6047 * FUNCTION : flushPerf
6048 *
6049 * DESCRIPTION: Performance-optimized version of flush that does not stream off;
6050 *              instead it flushes the backend and waits for pending buffers to return
6051 *
6052 * PARAMETERS :
6053 *
6054 *
6055 * RETURN : 0 : success
6056 * -EINVAL: input is malformed (device is not valid)
6057 * -ENODEV: if the device has encountered a serious error
6058 *==========================================================================*/
6059int QCamera3HardwareInterface::flushPerf()
6060{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006061 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 int32_t rc = 0;
6063 struct timespec timeout;
6064 bool timed_wait = false;
6065
6066 pthread_mutex_lock(&mMutex);
6067 mFlushPerf = true;
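    // Snapshot how many buffers the backend currently holds; flushPerf waits below
    // until all of them have been returned before erroring out pending requests.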
6068 mPendingBuffersMap.numPendingBufsAtFlush =
6069 mPendingBuffersMap.get_num_overall_buffers();
6070 LOGD("Calling flush. Wait for %d buffers to return",
6071 mPendingBuffersMap.numPendingBufsAtFlush);
6072
6073 /* send the flush event to the backend */
6074 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6075 if (rc < 0) {
6076 LOGE("Error in flush: IOCTL failure");
6077 mFlushPerf = false;
6078 pthread_mutex_unlock(&mMutex);
6079 return -ENODEV;
6080 }
6081
6082 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6083 LOGD("No pending buffers in HAL, return flush");
6084 mFlushPerf = false;
6085 pthread_mutex_unlock(&mMutex);
6086 return rc;
6087 }
6088
6089 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006090 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006091 if (rc < 0) {
6092        LOGE("Error reading the monotonic clock, cannot use timed wait");
6093 } else {
6094 timeout.tv_sec += FLUSH_TIMEOUT;
6095 timed_wait = true;
6096 }
6097
6098 //Block on conditional variable
6099 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6100 LOGD("Waiting on mBuffersCond");
6101 if (!timed_wait) {
6102 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6103 if (rc != 0) {
6104 LOGE("pthread_cond_wait failed due to rc = %s",
6105 strerror(rc));
6106 break;
6107 }
6108 } else {
6109 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6110 if (rc != 0) {
6111 LOGE("pthread_cond_timedwait failed due to rc = %s",
6112 strerror(rc));
6113 break;
6114 }
6115 }
6116 }
6117 if (rc != 0) {
6118 mFlushPerf = false;
6119 pthread_mutex_unlock(&mMutex);
6120 return -ENODEV;
6121 }
6122
6123 LOGD("Received buffers, now safe to return them");
6124
6125 //make sure the channels handle flush
6126 //currently only required for the picture channel to release snapshot resources
6127 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6128 it != mStreamInfo.end(); it++) {
6129 QCamera3Channel *channel = (*it)->channel;
6130 if (channel) {
6131 rc = channel->flush();
6132 if (rc) {
6133 LOGE("Flushing the channels failed with error %d", rc);
6134                // Even though the channel flush failed, we need to continue and
6135                // return the buffers we have to the framework; however, the return
6136                // value will be an error
6137 rc = -ENODEV;
6138 }
6139 }
6140 }
6141
6142 /* notify the frameworks and send errored results */
6143 rc = notifyErrorForPendingRequests();
6144 if (rc < 0) {
6145 LOGE("notifyErrorForPendingRequests failed");
6146 pthread_mutex_unlock(&mMutex);
6147 return rc;
6148 }
6149
6150 //unblock process_capture_request
6151 mPendingLiveRequest = 0;
6152 unblockRequestIfNecessary();
6153
6154 mFlushPerf = false;
6155 pthread_mutex_unlock(&mMutex);
6156 LOGD ("Flush Operation complete. rc = %d", rc);
6157 return rc;
6158}
6159
6160/*===========================================================================
6161 * FUNCTION : handleCameraDeviceError
6162 *
6163 * DESCRIPTION: This function calls internal flush and notifies the error to
6164 * framework and updates the state variable.
6165 *
6166 * PARAMETERS : None
6167 *
6168 * RETURN : NO_ERROR on Success
6169 * Error code on failure
6170 *==========================================================================*/
6171int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6172{
6173 int32_t rc = NO_ERROR;
6174
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006175 {
6176 Mutex::Autolock lock(mFlushLock);
6177 pthread_mutex_lock(&mMutex);
6178 if (mState != ERROR) {
6179 //if mState != ERROR, nothing to be done
6180 pthread_mutex_unlock(&mMutex);
6181 return NO_ERROR;
6182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006184
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006185 rc = flush(false /* restart channels */);
6186 if (NO_ERROR != rc) {
6187 LOGE("internal flush to handle mState = ERROR failed");
6188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006190 pthread_mutex_lock(&mMutex);
6191 mState = DEINIT;
6192 pthread_mutex_unlock(&mMutex);
6193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006194
6195 camera3_notify_msg_t notify_msg;
6196 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6197 notify_msg.type = CAMERA3_MSG_ERROR;
6198 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6199 notify_msg.message.error.error_stream = NULL;
6200 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006201 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006202
6203 return rc;
6204}
6205
6206/*===========================================================================
6207 * FUNCTION : captureResultCb
6208 *
6209 * DESCRIPTION: Callback handler for all capture result
6210 * (streams, as well as metadata)
6211 *
6212 * PARAMETERS :
6213 *   @metadata_buf : metadata information
6214 *   @buffer       : actual gralloc buffer to be returned to the framework;
6215 *                   NULL if this is a metadata callback
 *   @frame_number : frame number of the request this result belongs to
 *   @isInputBuffer: true if the callback is for the input (reprocess) buffer
6216 *
6217 * RETURN : NONE
6218 *==========================================================================*/
6219void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6220 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6221{
6222 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006223 pthread_mutex_lock(&mMutex);
6224 uint8_t batchSize = mBatchSize;
6225 pthread_mutex_unlock(&mMutex);
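        // In HFR batch mode a single metadata callback covers the whole batch and is
        // fanned out per frame in handleBatchMetadata(); otherwise handle it directly.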
6226 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 handleBatchMetadata(metadata_buf,
6228 true /* free_and_bufdone_meta_buf */);
6229 } else { /* mBatchSize = 0 */
6230 hdrPlusPerfLock(metadata_buf);
6231 pthread_mutex_lock(&mMutex);
6232 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006233 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006234 true /* last urgent frame of batch metadata */,
6235 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006236 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 pthread_mutex_unlock(&mMutex);
6238 }
6239 } else if (isInputBuffer) {
6240 pthread_mutex_lock(&mMutex);
6241 handleInputBufferWithLock(frame_number);
6242 pthread_mutex_unlock(&mMutex);
6243 } else {
6244 pthread_mutex_lock(&mMutex);
6245 handleBufferWithLock(buffer, frame_number);
6246 pthread_mutex_unlock(&mMutex);
6247 }
6248 return;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION : getReprocessibleOutputStreamId
6253 *
6254 * DESCRIPTION: Get source output stream id for the input reprocess stream
6255 * based on size and format, which would be the largest
6256 * output stream if an input stream exists.
6257 *
6258 * PARAMETERS :
6259 * @id : return the stream id if found
6260 *
6261 * RETURN : int32_t type of status
6262 * NO_ERROR -- success
6263 * none-zero failure code
6264 *==========================================================================*/
6265int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6266{
6267    /* Check whether there is any output or bidirectional stream with the same size
6268       and format as the input stream, and return that stream */
6269 if ((mInputStreamInfo.dim.width > 0) &&
6270 (mInputStreamInfo.dim.height > 0)) {
6271 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6272 it != mStreamInfo.end(); it++) {
6273
6274 camera3_stream_t *stream = (*it)->stream;
6275 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6276 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6277 (stream->format == mInputStreamInfo.format)) {
6278 // Usage flag for an input stream and the source output stream
6279 // may be different.
6280 LOGD("Found reprocessible output stream! %p", *it);
6281 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6282 stream->usage, mInputStreamInfo.usage);
6283
6284 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6285 if (channel != NULL && channel->mStreams[0]) {
6286 id = channel->mStreams[0]->getMyServerID();
6287 return NO_ERROR;
6288 }
6289 }
6290 }
6291 } else {
6292 LOGD("No input stream, so no reprocessible output stream");
6293 }
6294 return NAME_NOT_FOUND;
6295}
6296
6297/*===========================================================================
6298 * FUNCTION : lookupFwkName
6299 *
6300 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6301 *              make sure the parameter is correctly propagated
6302 *
6303 * PARAMETERS :
6304 * @arr : map between the two enums
6305 * @len : len of the map
6306 * @hal_name : name of the hal_parm to map
6307 *
6308 * RETURN : int type of status
6309 *              fwk_name on success
6310 *              NAME_NOT_FOUND if no match is found
6311 *==========================================================================*/
6312template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6313 size_t len, halType hal_name)
6314{
6315
6316 for (size_t i = 0; i < len; i++) {
6317 if (arr[i].hal_name == hal_name) {
6318 return arr[i].fwk_name;
6319 }
6320 }
6321
6322    /* Not being able to find a matching framework type is not necessarily
6323     * an error. This happens when mm-camera supports more attributes
6324     * than the framework does */
6325 LOGH("Cannot find matching framework type");
6326 return NAME_NOT_FOUND;
6327}
6328
6329/*===========================================================================
6330 * FUNCTION : lookupHalName
6331 *
6332 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6333 *              make sure the parameter is correctly propagated
6334 *
6335 * PARAMETERS :
6336 * @arr : map between the two enums
6337 * @len : len of the map
6338 *   @fwk_name  : name of the framework parameter to map
6339 *
6340 * RETURN : int32_t type of status
6341 *              hal_name on success
6342 *              NAME_NOT_FOUND if no match is found
6343 *==========================================================================*/
6344template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6345 size_t len, fwkType fwk_name)
6346{
6347 for (size_t i = 0; i < len; i++) {
6348 if (arr[i].fwk_name == fwk_name) {
6349 return arr[i].hal_name;
6350 }
6351 }
6352
6353 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6354 return NAME_NOT_FOUND;
6355}
6356
6357/*===========================================================================
6358 * FUNCTION : lookupProp
6359 *
6360 * DESCRIPTION: lookup a value by its name
6361 *
6362 * PARAMETERS :
6363 * @arr : map between the two enums
6364 * @len : size of the map
6365 * @name : name to be looked up
6366 *
6367 * RETURN : Value if found
6368 * CAM_CDS_MODE_MAX if not found
6369 *==========================================================================*/
6370template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6371 size_t len, const char *name)
6372{
6373 if (name) {
6374 for (size_t i = 0; i < len; i++) {
6375 if (!strcmp(arr[i].desc, name)) {
6376 return arr[i].val;
6377 }
6378 }
6379 }
6380 return CAM_CDS_MODE_MAX;
6381}
6382
6383/*===========================================================================
6384 * FUNCTION   : translateFromHalMetadata
6385 * DESCRIPTION: Translate metadata from the backend (HAL) format into the
 *              framework camera_metadata_t format
6386 *
6387 * PARAMETERS :
6388 * @metadata : metadata information from callback
6389 * @timestamp: metadata buffer timestamp
6390 * @request_id: request id
6391 * @jpegMetadata: additional jpeg metadata
 * @pipeline_depth: pipeline depth reported for this result
 * @capture_intent: capture intent of the originating request
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006392 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006393 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6394 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 * @pprocDone: whether internal offline postprocessing is done
 * @fwk_cacMode: framework color aberration correction (CAC) mode associated with this request
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006396 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6397 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006398 *
6399 * RETURN : camera_metadata_t*
6400 * metadata in a format specified by fwk
6401 *==========================================================================*/
6402camera_metadata_t*
6403QCamera3HardwareInterface::translateFromHalMetadata(
6404 metadata_buffer_t *metadata,
6405 nsecs_t timestamp,
6406 int32_t request_id,
6407 const CameraMetadata& jpegMetadata,
6408 uint8_t pipeline_depth,
6409 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006410 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006411 /* DevCamDebug metadata translateFromHalMetadata argument */
6412 uint8_t DevCamDebug_meta_enable,
6413 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006415 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006416 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006417{
6418 CameraMetadata camMetadata;
6419 camera_metadata_t *resultMetadata;
6420
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006421 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006422 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6423 * Timestamp is needed because it's used for shutter notify calculation.
6424 * */
6425 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6426 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006427 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006428 }
6429
Thierry Strudel3d639192016-09-09 11:52:26 -07006430 if (jpegMetadata.entryCount())
6431 camMetadata.append(jpegMetadata);
6432
6433 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6434 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6435 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6436 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006437 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006438 if (mBatchSize == 0) {
6439 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6440 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6441 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006442
Samuel Ha68ba5172016-12-15 18:41:12 -08006443 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6444    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6445 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
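        // Each IF_META_AVAILABLE block below copies one vendor debug value from the
        // backend metadata into the framework result when the backend provided it.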
6446 // DevCamDebug metadata translateFromHalMetadata AF
6447 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6448 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6449 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6450 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6451 }
6452 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6453 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6454 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6455 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6456 }
6457 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6458 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6459 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6460 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6461 }
6462 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6463 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6464 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6465 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6466 }
6467 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6468 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6469 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6470 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6471 }
6472 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6473 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6474 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6475 *DevCamDebug_af_monitor_pdaf_target_pos;
6476 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6477 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6478 }
6479 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6480 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6481 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6482 *DevCamDebug_af_monitor_pdaf_confidence;
6483 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6484 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6485 }
6486 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6487 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6488 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6489 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6490 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6491 }
6492 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6493 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6494 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6495 *DevCamDebug_af_monitor_tof_target_pos;
6496 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6497 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6498 }
6499 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6500 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6501 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6502 *DevCamDebug_af_monitor_tof_confidence;
6503 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6504 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6505 }
6506 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6507 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6508 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6509 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6510 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6513 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6514 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6515 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6516 &fwk_DevCamDebug_af_monitor_type_select, 1);
6517 }
6518 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6519 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6520 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6521 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6522 &fwk_DevCamDebug_af_monitor_refocus, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6525 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6526 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6527 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6528 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6529 }
6530 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6531 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6532 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6533 *DevCamDebug_af_search_pdaf_target_pos;
6534 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6535 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6538 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6540 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6541 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6544 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6545 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6546 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6547 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6548 }
6549 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6550 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6551 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6552 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6553 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6554 }
6555 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6556 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6557 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6558 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6559 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6562 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6563 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6564 *DevCamDebug_af_search_tof_target_pos;
6565 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6566 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6567 }
6568 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6569 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6570 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6572 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6575 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6576 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6577 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6578 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6579 }
6580 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6581 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6582 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6583 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6584 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6585 }
6586 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6587 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6588 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6589 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6590 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6593 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6594 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6595 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6596 &fwk_DevCamDebug_af_search_type_select, 1);
6597 }
6598 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6599 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6600 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6601 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6602 &fwk_DevCamDebug_af_search_next_pos, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6605 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6606 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6607 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6608 &fwk_DevCamDebug_af_search_target_pos, 1);
6609 }
6610 // DevCamDebug metadata translateFromHalMetadata AEC
6611 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6612 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6613 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6614 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6617 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6618 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6619 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6622 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6623 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6624 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6627 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6628 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6629 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6632 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6633 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6634 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6635 }
6636 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6637 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6638 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6639 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6642 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6643 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6644 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6645 }
6646 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6647 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6648 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6649 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6650 }
Samuel Ha34229982017-02-17 13:51:11 -08006651 // DevCamDebug metadata translateFromHalMetadata zzHDR
6652 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6653 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6654 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6655 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6658 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6659 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6660 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6661 }
6662 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6663 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6664 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6665 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6668 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6669 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6670 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6671 }
6672 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6673 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6674 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6675 *DevCamDebug_aec_hdr_sensitivity_ratio;
6676 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6677 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6678 }
6679 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6680 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6681 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6682 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6683 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6684 }
6685 // DevCamDebug metadata translateFromHalMetadata ADRC
6686 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6687 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6688 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6689 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6690 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6691 }
6692 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6693 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6694 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6695 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6696 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6697 }
6698 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6699 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6700 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6701 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6702 }
6703 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6704 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6705 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6706 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6707 }
6708 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6709 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6710 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6711 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6712 }
6713 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6714 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6715 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6716 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6717 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006718 // DevCamDebug metadata translateFromHalMetadata AWB
6719 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6720 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6721 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6722 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6723 }
6724 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6725 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6726 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6727 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6728 }
6729 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6730 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6731 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6732 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6733 }
6734 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6735 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6736 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6737 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6740 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6741 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6742 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6743 }
6744 }
6745 // atrace_end(ATRACE_TAG_ALWAYS);
6746
Thierry Strudel3d639192016-09-09 11:52:26 -07006747 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6748 int64_t fwk_frame_number = *frame_number;
6749 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6750 }
6751
6752 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6753 int32_t fps_range[2];
6754 fps_range[0] = (int32_t)float_range->min_fps;
6755 fps_range[1] = (int32_t)float_range->max_fps;
6756 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6757 fps_range, 2);
6758 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6759 fps_range[0], fps_range[1]);
6760 }
6761
6762 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6763 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6764 }
6765
6766 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6767 int val = lookupFwkName(SCENE_MODES_MAP,
6768 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6769 *sceneMode);
6770 if (NAME_NOT_FOUND != val) {
6771 uint8_t fwkSceneMode = (uint8_t)val;
6772 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6773 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6774 fwkSceneMode);
6775 }
6776 }
6777
6778 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6779 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6780 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6781 }
6782
6783 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6784 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6785 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6786 }
6787
6788 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6789 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6790 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6791 }
6792
6793 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6794 CAM_INTF_META_EDGE_MODE, metadata) {
6795 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6796 }
6797
6798 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6799 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6800 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6801 }
6802
6803 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6804 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6805 }
6806
6807 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6808 if (0 <= *flashState) {
6809 uint8_t fwk_flashState = (uint8_t) *flashState;
6810 if (!gCamCapability[mCameraId]->flash_available) {
6811 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6812 }
6813 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6814 }
6815 }
6816
6817 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6818 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6819 if (NAME_NOT_FOUND != val) {
6820 uint8_t fwk_flashMode = (uint8_t)val;
6821 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6822 }
6823 }
6824
6825 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6826 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6827 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6828 }
6829
6830 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6831 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6832 }
6833
6834 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6835 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6836 }
6837
6838 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6839 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6840 }
6841
6842 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6843 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6844 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6845 }
6846
6847 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6848 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6849 LOGD("fwk_videoStab = %d", fwk_videoStab);
6850 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6851 } else {
6852 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6853 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6854 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6855 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006856 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006857 }
6858
6859 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6860 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6861 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6862 }
6863
6864 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6865 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6866 }
6867
Thierry Strudel3d639192016-09-09 11:52:26 -07006868 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6869 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006870 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006871
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006872 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6873 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006874
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006875 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006876 blackLevelAppliedPattern->cam_black_level[0],
6877 blackLevelAppliedPattern->cam_black_level[1],
6878 blackLevelAppliedPattern->cam_black_level[2],
6879 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006880 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6881 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006882
6883#ifndef USE_HAL_3_3
6884 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006885 // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6886 // depth space.
6887 fwk_blackLevelInd[0] /= 4.0;
6888 fwk_blackLevelInd[1] /= 4.0;
6889 fwk_blackLevelInd[2] /= 4.0;
6890 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006891 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6892 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006893#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006894 }
6895
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006896#ifndef USE_HAL_3_3
6897 // Fixed whitelevel is used by ISP/Sensor
6898 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6899 &gCamCapability[mCameraId]->white_level, 1);
6900#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006901
6902 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6903 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6904 int32_t scalerCropRegion[4];
6905 scalerCropRegion[0] = hScalerCropRegion->left;
6906 scalerCropRegion[1] = hScalerCropRegion->top;
6907 scalerCropRegion[2] = hScalerCropRegion->width;
6908 scalerCropRegion[3] = hScalerCropRegion->height;
6909
6910 // Adjust crop region from sensor output coordinate system to active
6911 // array coordinate system.
6912 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6913 scalerCropRegion[2], scalerCropRegion[3]);
6914
6915 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6916 }
6917
6918 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6919 LOGD("sensorExpTime = %lld", *sensorExpTime);
6920 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6921 }
6922
6923 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6924 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6925 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6926 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6927 }
6928
6929 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6930 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6931 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6932 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6933 sensorRollingShutterSkew, 1);
6934 }
6935
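    // Note on the noise profile published below: one (S, O) coefficient pair is reported
    // per color channel, where (per the android.sensor.noiseProfile definition) sensor noise
    // is modeled as growing roughly linearly with signal level (variance ~ S * x + O). Since
    // S and O here are derived from the analog sensitivity alone, the same pair is simply
    // replicated for every color channel.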
6936 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6937 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6938 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6939
6940 //calculate the noise profile based on sensitivity
6941 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6942 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6943 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6944 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6945 noise_profile[i] = noise_profile_S;
6946 noise_profile[i+1] = noise_profile_O;
6947 }
6948 LOGD("noise model entry (S, O) is (%f, %f)",
6949 noise_profile_S, noise_profile_O);
6950 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6951 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6952 }
6953
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006954#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006955 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006956 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006957 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006958 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006959 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6960 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6961 }
6962 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006963#endif
6964
Thierry Strudel3d639192016-09-09 11:52:26 -07006965 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6966 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6967 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6968 }
6969
6970 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6971 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6972 *faceDetectMode);
6973 if (NAME_NOT_FOUND != val) {
6974 uint8_t fwk_faceDetectMode = (uint8_t)val;
6975 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6976
6977 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6978 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6979 CAM_INTF_META_FACE_DETECTION, metadata) {
6980 uint8_t numFaces = MIN(
6981 faceDetectionInfo->num_faces_detected, MAX_ROI);
6982 int32_t faceIds[MAX_ROI];
6983 uint8_t faceScores[MAX_ROI];
6984 int32_t faceRectangles[MAX_ROI * 4];
6985 int32_t faceLandmarks[MAX_ROI * 6];
6986 size_t j = 0, k = 0;
6987
6988 for (size_t i = 0; i < numFaces; i++) {
6989 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6990 // Map the face boundary from the sensor output coordinate system to the
6991 // active array coordinate system.
6992 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6993 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6994 rect.width, rect.height);
6995
6996 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6997 faceRectangles+j, -1);
6998
6999 j+= 4;
7000 }
7001 if (numFaces <= 0) {
7002 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7003 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7004 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7005 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7006 }
7007
7008 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7009 numFaces);
7010 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7011 faceRectangles, numFaces * 4U);
7012 if (fwk_faceDetectMode ==
7013 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7014 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7015 CAM_INTF_META_FACE_LANDMARK, metadata) {
7016
7017 for (size_t i = 0; i < numFaces; i++) {
7018 // Map the landmark coordinates from the sensor output coordinate
7019 // system to the active array coordinate system.
7020 mCropRegionMapper.toActiveArray(
7021 landmarks->face_landmarks[i].left_eye_center.x,
7022 landmarks->face_landmarks[i].left_eye_center.y);
7023 mCropRegionMapper.toActiveArray(
7024 landmarks->face_landmarks[i].right_eye_center.x,
7025 landmarks->face_landmarks[i].right_eye_center.y);
7026 mCropRegionMapper.toActiveArray(
7027 landmarks->face_landmarks[i].mouth_center.x,
7028 landmarks->face_landmarks[i].mouth_center.y);
7029
7030 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007031 k+= TOTAL_LANDMARK_INDICES;
7032 }
7033 } else {
7034 for (size_t i = 0; i < numFaces; i++) {
7035 setInvalidLandmarks(faceLandmarks+k);
7036 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007037 }
7038 }
7039
7040 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7041 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7042 faceLandmarks, numFaces * 6U);
7043 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007044 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7045 CAM_INTF_META_FACE_BLINK, metadata) {
7046 uint8_t detected[MAX_ROI];
7047 uint8_t degree[MAX_ROI * 2];
7048 for (size_t i = 0; i < numFaces; i++) {
7049 detected[i] = blinks->blink[i].blink_detected;
7050 degree[2 * i] = blinks->blink[i].left_blink;
7051 degree[2 * i + 1] = blinks->blink[i].right_blink;
7052 }
7053 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7054 detected, numFaces);
7055 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7056 degree, numFaces * 2);
7057 }
7058 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7059 CAM_INTF_META_FACE_SMILE, metadata) {
7060 uint8_t degree[MAX_ROI];
7061 uint8_t confidence[MAX_ROI];
7062 for (size_t i = 0; i < numFaces; i++) {
7063 degree[i] = smiles->smile[i].smile_degree;
7064 confidence[i] = smiles->smile[i].smile_confidence;
7065 }
7066 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7067 degree, numFaces);
7068 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7069 confidence, numFaces);
7070 }
7071 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7072 CAM_INTF_META_FACE_GAZE, metadata) {
7073 int8_t angle[MAX_ROI];
7074 int32_t direction[MAX_ROI * 3];
7075 int8_t degree[MAX_ROI * 2];
7076 for (size_t i = 0; i < numFaces; i++) {
7077 angle[i] = gazes->gaze[i].gaze_angle;
7078 direction[3 * i] = gazes->gaze[i].updown_dir;
7079 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7080 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7081 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7082 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7083 }
7084 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7085 (uint8_t *)angle, numFaces);
7086 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7087 direction, numFaces * 3);
7088 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7089 (uint8_t *)degree, numFaces * 2);
7090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007091 }
7092 }
7093 }
7094 }
7095
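    // Histogram handling: the histogram mode (and, for the 2017 experimental tags, the bin
    // count) is always echoed back to the framework; the actual histogram payload below is
    // published only when the mode is ON and a valid bin count was reported. For Bayer stats
    // a single channel buffer is forwarded (GR, GB or B when explicitly reported, otherwise
    // the R buffer); for YUV stats the YUV histogram buffer is used.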
7096 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7097 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007098 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007099 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007100 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007101
Shuzhen Wang14415f52016-11-16 18:26:18 -08007102 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7103 histogramBins = *histBins;
7104 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7105 }
7106
7107 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007108 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7109 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007110 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007111
7112 switch (stats_data->type) {
7113 case CAM_HISTOGRAM_TYPE_BAYER:
7114 switch (stats_data->bayer_stats.data_type) {
7115 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007116 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7117 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007118 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007119 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7120 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007121 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007122 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7123 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007125 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007126 case CAM_STATS_CHANNEL_R:
7127 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007128 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7129 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130 }
7131 break;
7132 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007133 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 break;
7135 }
7136
Shuzhen Wang14415f52016-11-16 18:26:18 -08007137 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007138 }
7139 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007140 }
7141
7142 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7143 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7144 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7145 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7146 }
7147
7148 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7149 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7150 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7151 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7152 }
7153
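    // Lens shading map: dimensions are clamped to the HAL's maximum shading map size, and
    // four gain values (one per Bayer channel) are reported per grid cell, hence the
    // 4 * map_width * map_height payload size below.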
7154 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7155 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7156 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7157 CAM_MAX_SHADING_MAP_HEIGHT);
7158 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7159 CAM_MAX_SHADING_MAP_WIDTH);
7160 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7161 lensShadingMap->lens_shading, 4U * map_width * map_height);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7165 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7166 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7167 }
7168
7169 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7170 //Populate CAM_INTF_META_TONEMAP_CURVES
7171 /* ch0 = G, ch 1 = B, ch 2 = R*/
7172 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7173 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7174 tonemap->tonemap_points_cnt,
7175 CAM_MAX_TONEMAP_CURVE_SIZE);
7176 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7177 }
7178
7179 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7180 &tonemap->curves[0].tonemap_points[0][0],
7181 tonemap->tonemap_points_cnt * 2);
7182
7183 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7184 &tonemap->curves[1].tonemap_points[0][0],
7185 tonemap->tonemap_points_cnt * 2);
7186
7187 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7188 &tonemap->curves[2].tonemap_points[0][0],
7189 tonemap->tonemap_points_cnt * 2);
7190 }
7191
7192 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7193 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7194 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7195 CC_GAIN_MAX);
7196 }
7197
7198 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7199 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7200 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7201 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7202 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7203 }
7204
7205 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7206 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7207 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7208 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7209 toneCurve->tonemap_points_cnt,
7210 CAM_MAX_TONEMAP_CURVE_SIZE);
7211 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7212 }
7213 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7214 (float*)toneCurve->curve.tonemap_points,
7215 toneCurve->tonemap_points_cnt * 2);
7216 }
7217
7218 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7219 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7220 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7221 predColorCorrectionGains->gains, 4);
7222 }
7223
7224 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7225 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7226 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7227 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7228 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7229 }
7230
7231 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7232 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7233 }
7234
7235 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7236 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7237 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7238 }
7239
7240 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7241 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7242 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7246 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7247 *effectMode);
7248 if (NAME_NOT_FOUND != val) {
7249 uint8_t fwk_effectMode = (uint8_t)val;
7250 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7251 }
7252 }
7253
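    // Test pattern data: the framework expects the solid-color pattern values in a fixed
    // [R, Geven, Godd, B] order, so the HAL's Gr/Gb samples are swapped below depending on
    // the sensor's CFA arrangement (presumably to account for which green sample lands on
    // the even rows).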
7254 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7255 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7256 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7257 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7258 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7259 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7260 }
7261 int32_t fwk_testPatternData[4];
7262 fwk_testPatternData[0] = testPatternData->r;
7263 fwk_testPatternData[3] = testPatternData->b;
7264 switch (gCamCapability[mCameraId]->color_arrangement) {
7265 case CAM_FILTER_ARRANGEMENT_RGGB:
7266 case CAM_FILTER_ARRANGEMENT_GRBG:
7267 fwk_testPatternData[1] = testPatternData->gr;
7268 fwk_testPatternData[2] = testPatternData->gb;
7269 break;
7270 case CAM_FILTER_ARRANGEMENT_GBRG:
7271 case CAM_FILTER_ARRANGEMENT_BGGR:
7272 fwk_testPatternData[2] = testPatternData->gr;
7273 fwk_testPatternData[1] = testPatternData->gb;
7274 break;
7275 default:
7276 LOGE("color arrangement %d is not supported",
7277 gCamCapability[mCameraId]->color_arrangement);
7278 break;
7279 }
7280 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7281 }
7282
7283 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7284 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7285 }
7286
7287 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7288 String8 str((const char *)gps_methods);
7289 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7290 }
7291
7292 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7293 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7294 }
7295
7296 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7297 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7298 }
7299
7300 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7301 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7302 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7303 }
7304
7305 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7306 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7307 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7308 }
7309
7310 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7311 int32_t fwk_thumb_size[2];
7312 fwk_thumb_size[0] = thumb_size->width;
7313 fwk_thumb_size[1] = thumb_size->height;
7314 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7315 }
7316
7317 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7318 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7319 privateData,
7320 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7321 }
7322
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007323 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007324 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007325 meteringMode, 1);
7326 }
7327
Thierry Strudel54dc9782017-02-15 12:12:10 -08007328 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7329 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7330 LOGD("hdr_scene_data: %d %f\n",
7331 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7332 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7333 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7334 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7335 &isHdr, 1);
7336 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7337 &isHdrConfidence, 1);
7338 }
7339
7340
7341
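    // Tuning data blob layout, as packed below: a version word, followed by the sensor, VFE,
    // CPP, CAC and mod3 section sizes (one uint32_t each, with mod3 forced to 0), followed by
    // the sensor/VFE/CPP/CAC payloads, each clipped to its TUNING_*_DATA_MAX limit. The whole
    // buffer is exposed through the QCAMERA3_TUNING_META_DATA_BLOB vendor tag.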
Thierry Strudel3d639192016-09-09 11:52:26 -07007342 if (metadata->is_tuning_params_valid) {
7343 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7344 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7345 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7346
7347
7348 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7349 sizeof(uint32_t));
7350 data += sizeof(uint32_t);
7351
7352 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7353 sizeof(uint32_t));
7354 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7355 data += sizeof(uint32_t);
7356
7357 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7358 sizeof(uint32_t));
7359 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7360 data += sizeof(uint32_t);
7361
7362 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7363 sizeof(uint32_t));
7364 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7365 data += sizeof(uint32_t);
7366
7367 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7368 sizeof(uint32_t));
7369 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7370 data += sizeof(uint32_t);
7371
7372 metadata->tuning_params.tuning_mod3_data_size = 0;
7373 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7374 sizeof(uint32_t));
7375 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7376 data += sizeof(uint32_t);
7377
7378 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7379 TUNING_SENSOR_DATA_MAX);
7380 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7381 count);
7382 data += count;
7383
7384 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7385 TUNING_VFE_DATA_MAX);
7386 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7387 count);
7388 data += count;
7389
7390 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7391 TUNING_CPP_DATA_MAX);
7392 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7393 count);
7394 data += count;
7395
7396 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7397 TUNING_CAC_DATA_MAX);
7398 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7399 count);
7400 data += count;
7401
7402 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7403 (int32_t *)(void *)tuning_meta_data_blob,
7404 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7405 }
7406
7407 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7408 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7409 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7410 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7411 NEUTRAL_COL_POINTS);
7412 }
7413
7414 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7415 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7416 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7417 }
7418
7419 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7420 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7421 // Adjust crop region from sensor output coordinate system to active
7422 // array coordinate system.
7423 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7424 hAeRegions->rect.width, hAeRegions->rect.height);
7425
7426 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7427 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7428 REGIONS_TUPLE_COUNT);
7429 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7430 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7431 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7432 hAeRegions->rect.height);
7433 }
7434
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007435 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7436 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7437 if (NAME_NOT_FOUND != val) {
7438 uint8_t fwkAfMode = (uint8_t)val;
7439 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7440 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7441 } else {
7442 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7443 val);
7444 }
7445 }
7446
Thierry Strudel3d639192016-09-09 11:52:26 -07007447 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7448 uint8_t fwk_afState = (uint8_t) *afState;
7449 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007450 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007451 }
7452
7453 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7454 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7455 }
7456
7457 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7458 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7459 }
7460
7461 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7462 uint8_t fwk_lensState = *lensState;
7463 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7464 }
7465
Thierry Strudel3d639192016-09-09 11:52:26 -07007466
7467 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007468 uint32_t ab_mode = *hal_ab_mode;
7469 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7470 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7471 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7472 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007473 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007474 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007475 if (NAME_NOT_FOUND != val) {
7476 uint8_t fwk_ab_mode = (uint8_t)val;
7477 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7478 }
7479 }
7480
7481 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7482 int val = lookupFwkName(SCENE_MODES_MAP,
7483 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7484 if (NAME_NOT_FOUND != val) {
7485 uint8_t fwkBestshotMode = (uint8_t)val;
7486 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7487 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7488 } else {
7489 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7490 }
7491 }
7492
7493 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7494 uint8_t fwk_mode = (uint8_t) *mode;
7495 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7496 }
7497
7498 /* Constant metadata values to be updated */
7499 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7500 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7501
7502 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7503 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7504
7505 int32_t hotPixelMap[2];
7506 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7507
7508 // CDS
7509 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7510 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7511 }
7512
Thierry Strudel04e026f2016-10-10 11:27:36 -07007513 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7514 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007515 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007516 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7517 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7518 } else {
7519 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7520 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007521
7522 if(fwk_hdr != curr_hdr_state) {
7523 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7524 if(fwk_hdr)
7525 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7526 else
7527 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7528 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007529 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7530 }
7531
Thierry Strudel54dc9782017-02-15 12:12:10 -08007532 //binning correction
7533 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7534 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7535 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7536 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7537 }
7538
Thierry Strudel04e026f2016-10-10 11:27:36 -07007539 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007540 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007541 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7542 int8_t is_ir_on = 0;
7543
7544 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7545 if(is_ir_on != curr_ir_state) {
7546 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7547 if(is_ir_on)
7548 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7549 else
7550 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7551 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007552 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007553 }
7554
Thierry Strudel269c81a2016-10-12 12:13:59 -07007555 // AEC SPEED
7556 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7557 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7558 }
7559
7560 // AWB SPEED
7561 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7562 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7563 }
7564
Thierry Strudel3d639192016-09-09 11:52:26 -07007565 // TNR
7566 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7567 uint8_t tnr_enable = tnr->denoise_enable;
7568 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007569 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7570 int8_t is_tnr_on = 0;
7571
7572 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7573 if(is_tnr_on != curr_tnr_state) {
7574 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7575 if(is_tnr_on)
7576 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7577 else
7578 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007580
7581 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7582 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7583 }
7584
7585 // Reprocess crop data
7586 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7587 uint8_t cnt = crop_data->num_of_streams;
7588 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7589 // mm-qcamera-daemon only posts crop_data for streams
7590 // not linked to pproc, so the absence of valid crop
7591 // metadata is not necessarily an error.
7592 LOGD("No valid crop metadata entries");
7593 } else {
7594 uint32_t reproc_stream_id;
7595 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7596 LOGD("No reprocessible stream found, ignore crop data");
7597 } else {
7598 int rc = NO_ERROR;
7599 Vector<int32_t> roi_map;
7600 int32_t *crop = new int32_t[cnt*4];
7601 if (NULL == crop) {
7602 rc = NO_MEMORY;
7603 }
7604 if (NO_ERROR == rc) {
7605 int32_t streams_found = 0;
7606 for (size_t i = 0; i < cnt; i++) {
7607 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7608 if (pprocDone) {
7609 // HAL already does internal reprocessing,
7610 // either via reprocessing before JPEG encoding,
7611 // or offline postprocessing for pproc bypass case.
7612 crop[0] = 0;
7613 crop[1] = 0;
7614 crop[2] = mInputStreamInfo.dim.width;
7615 crop[3] = mInputStreamInfo.dim.height;
7616 } else {
7617 crop[0] = crop_data->crop_info[i].crop.left;
7618 crop[1] = crop_data->crop_info[i].crop.top;
7619 crop[2] = crop_data->crop_info[i].crop.width;
7620 crop[3] = crop_data->crop_info[i].crop.height;
7621 }
7622 roi_map.add(crop_data->crop_info[i].roi_map.left);
7623 roi_map.add(crop_data->crop_info[i].roi_map.top);
7624 roi_map.add(crop_data->crop_info[i].roi_map.width);
7625 roi_map.add(crop_data->crop_info[i].roi_map.height);
7626 streams_found++;
7627 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7628 crop[0], crop[1], crop[2], crop[3]);
7629 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7630 crop_data->crop_info[i].roi_map.left,
7631 crop_data->crop_info[i].roi_map.top,
7632 crop_data->crop_info[i].roi_map.width,
7633 crop_data->crop_info[i].roi_map.height);
7634 break;
7635
7636 }
7637 }
7638 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7639 &streams_found, 1);
7640 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7641 crop, (size_t)(streams_found * 4));
7642 if (roi_map.array()) {
7643 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7644 roi_map.array(), roi_map.size());
7645 }
7646 }
7647 if (crop) {
7648 delete [] crop;
7649 }
7650 }
7651 }
7652 }
7653
7654 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7655 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7656 // so hardcode the CAC result to OFF mode.
7657 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7658 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7659 } else {
7660 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7661 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7662 *cacMode);
7663 if (NAME_NOT_FOUND != val) {
7664 uint8_t resultCacMode = (uint8_t)val;
7665 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7666 // If not, report the CAC mode that came in the corresponding request.
7667 if (fwk_cacMode != resultCacMode) {
7668 resultCacMode = fwk_cacMode;
7669 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007670 //Check if CAC is disabled by property
7671 if (m_cacModeDisabled) {
7672 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7673 }
7674
Thierry Strudel3d639192016-09-09 11:52:26 -07007675 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7676 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7677 } else {
7678 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7679 }
7680 }
7681 }
7682
7683 // Post blob of cam_cds_data through vendor tag.
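    // Only a single-stream override is posted: the session-level CDS enable is copied as-is,
    // and the per-stream CDS enable is taken from the entry matching the reprocessible output
    // stream (when one exists); all other per-stream entries are dropped.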
7684 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7685 uint8_t cnt = cdsInfo->num_of_streams;
7686 cam_cds_data_t cdsDataOverride;
7687 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7688 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7689 cdsDataOverride.num_of_streams = 1;
7690 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7691 uint32_t reproc_stream_id;
7692 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7693 LOGD("No reprocessible stream found, ignore cds data");
7694 } else {
7695 for (size_t i = 0; i < cnt; i++) {
7696 if (cdsInfo->cds_info[i].stream_id ==
7697 reproc_stream_id) {
7698 cdsDataOverride.cds_info[0].cds_enable =
7699 cdsInfo->cds_info[i].cds_enable;
7700 break;
7701 }
7702 }
7703 }
7704 } else {
7705 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7706 }
7707 camMetadata.update(QCAMERA3_CDS_INFO,
7708 (uint8_t *)&cdsDataOverride,
7709 sizeof(cam_cds_data_t));
7710 }
7711
7712 // Ldaf calibration data
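    // The two laser-AF calibration words are cached once per session (guarded by
    // mLdafCalibExist) for later reuse; the CAM_INTF_META_LDAF_EXIF tag name suggests they
    // are eventually consumed when composing EXIF data.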
7713 if (!mLdafCalibExist) {
7714 IF_META_AVAILABLE(uint32_t, ldafCalib,
7715 CAM_INTF_META_LDAF_EXIF, metadata) {
7716 mLdafCalibExist = true;
7717 mLdafCalib[0] = ldafCalib[0];
7718 mLdafCalib[1] = ldafCalib[1];
7719 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7720 ldafCalib[0], ldafCalib[1]);
7721 }
7722 }
7723
Thierry Strudel54dc9782017-02-15 12:12:10 -08007724 // EXIF debug data through vendor tag
7725 /*
7726 * Mobicat Mask can assume 3 values:
7727 * 1 refers to Mobicat data,
7728 * 2 refers to Stats Debug and Exif Debug Data
7729 * 3 refers to Mobicat and Stats Debug Data
7730 * We want to make sure that we are sending Exif debug data
7731 * only when Mobicat Mask is 2.
7732 */
7733 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7734 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7735 (uint8_t *)(void *)mExifParams.debug_params,
7736 sizeof(mm_jpeg_debug_exif_params_t));
7737 }
7738
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007739 // Reprocess and DDM debug data through vendor tag
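    // All snapshot-related crop and geometry information (sensor, CAMIF, ISP and CPP crop
    // windows, AF focal length ratio, pipeline flip, rotation, AF ROI and the dynamic image
    // feature mask) is collected into a single cam_reprocess_info_t and published as one blob.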
7740 cam_reprocess_info_t repro_info;
7741 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007742 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7743 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007744 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007745 }
7746 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7747 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007748 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007749 }
7750 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7751 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007752 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 }
7754 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7755 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007756 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007757 }
7758 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7759 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007760 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007761 }
7762 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007763 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007764 }
7765 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7766 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007768 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007769 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7770 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7771 }
7772 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7773 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7774 }
7775 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7776 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007777
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007778 // INSTANT AEC MODE
7779 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7780 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7781 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7782 }
7783
Shuzhen Wange763e802016-03-31 10:24:29 -07007784 // AF scene change
7785 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7786 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7787 }
7788
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 resultMetadata = camMetadata.release();
7790 return resultMetadata;
7791}
7792
7793/*===========================================================================
7794 * FUNCTION : saveExifParams
7795 *
7796 * DESCRIPTION:
7797 *
7798 * PARAMETERS :
7799 * @metadata : metadata information from callback
7800 *
7801 * RETURN : none
7802 *
7803 *==========================================================================*/
7804void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7805{
7806 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7807 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7808 if (mExifParams.debug_params) {
7809 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7810 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7811 }
7812 }
7813 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7814 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7815 if (mExifParams.debug_params) {
7816 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7817 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7818 }
7819 }
7820 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7821 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7822 if (mExifParams.debug_params) {
7823 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7824 mExifParams.debug_params->af_debug_params_valid = TRUE;
7825 }
7826 }
7827 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7828 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7829 if (mExifParams.debug_params) {
7830 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7831 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7832 }
7833 }
7834 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7835 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7836 if (mExifParams.debug_params) {
7837 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7838 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7839 }
7840 }
7841 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7842 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7843 if (mExifParams.debug_params) {
7844 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7845 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7846 }
7847 }
7848 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7849 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7850 if (mExifParams.debug_params) {
7851 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7852 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7853 }
7854 }
7855 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7856 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7857 if (mExifParams.debug_params) {
7858 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7859 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7860 }
7861 }
7862}
7863
7864/*===========================================================================
7865 * FUNCTION : get3AExifParams
7866 *
7867 * DESCRIPTION: Return the cached EXIF parameters (including the 3A debug data)
7868 *
7869 * PARAMETERS : none
7870 *
7871 *
7872 * RETURN : mm_jpeg_exif_params_t
7873 *
7874 *==========================================================================*/
7875mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7876{
7877 return mExifParams;
7878}
7879
7880/*===========================================================================
7881 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7882 *
7883 * DESCRIPTION: Translate urgent (partial-result) metadata from the HAL callback into framework result metadata
7884 *
7885 * PARAMETERS :
7886 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007887 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7888 * urgent metadata in a batch. Always true for
7889 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007890 *
7891 * RETURN : camera_metadata_t*
7892 * metadata in a format specified by fwk
7893 *==========================================================================*/
7894camera_metadata_t*
7895QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007896 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007897{
7898 CameraMetadata camMetadata;
7899 camera_metadata_t *resultMetadata;
7900
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007901 if (!lastUrgentMetadataInBatch) {
7902 /* In batch mode, use empty metadata if this is not the last in batch
7903 */
7904 resultMetadata = allocate_camera_metadata(0, 0);
7905 return resultMetadata;
7906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007907
7908 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7909 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7910 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7911 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7912 }
7913
7914 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7915 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7916 &aecTrigger->trigger, 1);
7917 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7918 &aecTrigger->trigger_id, 1);
7919 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7920 aecTrigger->trigger);
7921 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7922 aecTrigger->trigger_id);
7923 }
7924
7925 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7926 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7927 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7928 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7929 }
7930
Thierry Strudel3d639192016-09-09 11:52:26 -07007931 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7932 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7933 &af_trigger->trigger, 1);
7934 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7935 af_trigger->trigger);
7936 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7937 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7938 af_trigger->trigger_id);
7939 }
7940
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07007941 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7942 /*af regions*/
7943 int32_t afRegions[REGIONS_TUPLE_COUNT];
7944 // Adjust crop region from sensor output coordinate system to active
7945 // array coordinate system.
7946 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7947 hAfRegions->rect.width, hAfRegions->rect.height);
7948
7949 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7950 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7951 REGIONS_TUPLE_COUNT);
7952 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7953 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7954 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7955 hAfRegions->rect.height);
7956 }
7957
Shuzhen Wangcc386c52017-03-29 09:28:08 -07007958 // AF region confidence
7959 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
7960 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
7961 }
7962
Thierry Strudel3d639192016-09-09 11:52:26 -07007963 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7964 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7965 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7966 if (NAME_NOT_FOUND != val) {
7967 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7968 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7969 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7970 } else {
7971 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7972 }
7973 }
7974
7975 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7976 uint32_t aeMode = CAM_AE_MODE_MAX;
7977 int32_t flashMode = CAM_FLASH_MODE_MAX;
7978 int32_t redeye = -1;
7979 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7980 aeMode = *pAeMode;
7981 }
7982 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7983 flashMode = *pFlashMode;
7984 }
7985 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7986 redeye = *pRedeye;
7987 }
7988
7989 if (1 == redeye) {
7990 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7991 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7992 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7993 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7994 flashMode);
7995 if (NAME_NOT_FOUND != val) {
7996 fwk_aeMode = (uint8_t)val;
7997 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7998 } else {
7999 LOGE("Unsupported flash mode %d", flashMode);
8000 }
8001 } else if (aeMode == CAM_AE_MODE_ON) {
8002 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8003 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8004 } else if (aeMode == CAM_AE_MODE_OFF) {
8005 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8006 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8007 } else {
8008 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8009 "flashMode:%d, aeMode:%u!!!",
8010 redeye, flashMode, aeMode);
8011 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008012 if (mInstantAEC) {
        // Increment frame index count until a bound is reached for instant AEC.
8014 mInstantAecFrameIdxCount++;
8015 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8016 CAM_INTF_META_AEC_INFO, metadata) {
8017 LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC has settled, or the number of frames has reached the bound,
            // reset instant AEC.
8020 if (ae_params->settled ||
8021 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8022 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8023 mInstantAEC = false;
8024 mResetInstantAEC = true;
8025 mInstantAecFrameIdxCount = 0;
8026 }
8027 }
8028 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008029 resultMetadata = camMetadata.release();
8030 return resultMetadata;
8031}
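
/*
 * Note on the AE mode derivation above (summary of the existing logic, not new
 * behaviour; ANDROID_CONTROL_ prefixes omitted for brevity). The framework AE
 * mode is deduced with the following precedence:
 *   1. redeye == 1                              -> AE_MODE_ON_AUTO_FLASH_REDEYE
 *   2. flashMode == CAM_FLASH_MODE_AUTO or _ON  -> value from AE_FLASH_MODE_MAP
 *   3. aeMode == CAM_AE_MODE_ON                 -> AE_MODE_ON
 *   4. aeMode == CAM_AE_MODE_OFF                -> AE_MODE_OFF
 * For example, redeye = 0 with flashMode = CAM_FLASH_MODE_AUTO reports the
 * value mapped from CAM_FLASH_MODE_AUTO (typically AE_MODE_ON_AUTO_FLASH).
 */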
8032
8033/*===========================================================================
8034 * FUNCTION : dumpMetadataToFile
8035 *
8036 * DESCRIPTION: Dumps tuning metadata to file system
8037 *
8038 * PARAMETERS :
8039 * @meta : tuning metadata
8040 * @dumpFrameCount : current dump frame count
 * @enabled : whether dumping is enabled
 * @type : string tag identifying the dump (used in the file name)
 * @frameNumber : frame number associated with this metadata
 *
8043 *==========================================================================*/
8044void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8045 uint32_t &dumpFrameCount,
8046 bool enabled,
8047 const char *type,
8048 uint32_t frameNumber)
8049{
8050 //Some sanity checks
8051 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8052 LOGE("Tuning sensor data size bigger than expected %d: %d",
8053 meta.tuning_sensor_data_size,
8054 TUNING_SENSOR_DATA_MAX);
8055 return;
8056 }
8057
8058 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8059 LOGE("Tuning VFE data size bigger than expected %d: %d",
8060 meta.tuning_vfe_data_size,
8061 TUNING_VFE_DATA_MAX);
8062 return;
8063 }
8064
8065 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8066 LOGE("Tuning CPP data size bigger than expected %d: %d",
8067 meta.tuning_cpp_data_size,
8068 TUNING_CPP_DATA_MAX);
8069 return;
8070 }
8071
8072 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8073 LOGE("Tuning CAC data size bigger than expected %d: %d",
8074 meta.tuning_cac_data_size,
8075 TUNING_CAC_DATA_MAX);
8076 return;
8077 }
8078 //
8079
8080 if(enabled){
8081 char timeBuf[FILENAME_MAX];
8082 char buf[FILENAME_MAX];
8083 memset(buf, 0, sizeof(buf));
8084 memset(timeBuf, 0, sizeof(timeBuf));
8085 time_t current_time;
8086 struct tm * timeinfo;
8087 time (&current_time);
8088 timeinfo = localtime (&current_time);
8089 if (timeinfo != NULL) {
8090 strftime (timeBuf, sizeof(timeBuf),
8091 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8092 }
8093 String8 filePath(timeBuf);
8094 snprintf(buf,
8095 sizeof(buf),
8096 "%dm_%s_%d.bin",
8097 dumpFrameCount,
8098 type,
8099 frameNumber);
8100 filePath.append(buf);
8101 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8102 if (file_fd >= 0) {
8103 ssize_t written_len = 0;
8104 meta.tuning_data_version = TUNING_DATA_VERSION;
8105 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8106 written_len += write(file_fd, data, sizeof(uint32_t));
8107 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8108 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8109 written_len += write(file_fd, data, sizeof(uint32_t));
8110 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8111 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8112 written_len += write(file_fd, data, sizeof(uint32_t));
8113 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8114 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8115 written_len += write(file_fd, data, sizeof(uint32_t));
8116 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8117 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8118 written_len += write(file_fd, data, sizeof(uint32_t));
8119 meta.tuning_mod3_data_size = 0;
8120 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8121 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8122 written_len += write(file_fd, data, sizeof(uint32_t));
8123 size_t total_size = meta.tuning_sensor_data_size;
8124 data = (void *)((uint8_t *)&meta.data);
8125 written_len += write(file_fd, data, total_size);
8126 total_size = meta.tuning_vfe_data_size;
8127 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8128 written_len += write(file_fd, data, total_size);
8129 total_size = meta.tuning_cpp_data_size;
8130 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8131 written_len += write(file_fd, data, total_size);
8132 total_size = meta.tuning_cac_data_size;
8133 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8134 written_len += write(file_fd, data, total_size);
8135 close(file_fd);
        } else {
8137 LOGE("fail to open file for metadata dumping");
8138 }
8139 }
8140}
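
/*
 * Illustrative sketch (documentation only, never compiled into the HAL): how a
 * host-side tool could parse the dump written by dumpMetadataToFile() above.
 * The header layout mirrors the write order: data version followed by the
 * sensor, VFE, CPP, CAC and mod3 sizes, then the payload blobs packed back to
 * back. Function and variable names below are hypothetical.
 */
#if 0 /* documentation-only example */
#include <cstdint>
#include <cstdio>
#include <vector>

static bool parseTuningDump(const char *path)
{
    FILE *fp = fopen(path, "rb");
    if (fp == NULL) {
        return false;
    }
    // version, sensor_size, vfe_size, cpp_size, cac_size, mod3_size
    uint32_t header[6];
    if (fread(header, sizeof(uint32_t), 6, fp) != 6) {
        fclose(fp);
        return false;
    }
    // Payload blobs follow in the same order as the sizes
    // (sensor, VFE, CPP, CAC); mod3 size is always written as 0.
    size_t total = (size_t)header[1] + header[2] + header[3] + header[4];
    std::vector<uint8_t> payload(total);
    bool ok = (total == 0) ||
            (fread(payload.data(), 1, total, fp) == total);
    fclose(fp);
    return ok;
}
#endif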
8141
8142/*===========================================================================
8143 * FUNCTION : cleanAndSortStreamInfo
8144 *
8145 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
 *              and sort them such that the raw stream is at the end of the list.
 *              This is a workaround for a camera daemon constraint.
8148 *
8149 * PARAMETERS : None
8150 *
8151 *==========================================================================*/
8152void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8153{
8154 List<stream_info_t *> newStreamInfo;
8155
8156 /*clean up invalid streams*/
8157 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8158 it != mStreamInfo.end();) {
8159 if(((*it)->status) == INVALID){
8160 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8161 delete channel;
8162 free(*it);
8163 it = mStreamInfo.erase(it);
8164 } else {
8165 it++;
8166 }
8167 }
8168
8169 // Move preview/video/callback/snapshot streams into newList
8170 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8171 it != mStreamInfo.end();) {
8172 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8173 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8174 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8175 newStreamInfo.push_back(*it);
8176 it = mStreamInfo.erase(it);
8177 } else
8178 it++;
8179 }
8180 // Move raw streams into newList
8181 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8182 it != mStreamInfo.end();) {
8183 newStreamInfo.push_back(*it);
8184 it = mStreamInfo.erase(it);
8185 }
8186
8187 mStreamInfo = newStreamInfo;
8188}
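
/*
 * For example (illustrative only): if mStreamInfo held
 *   [RAW16, IMPLEMENTATION_DEFINED, BLOB(INVALID)]
 * the invalid BLOB entry is deleted and freed first, and the remaining streams
 * are reordered so that processed streams precede raw streams:
 *   [IMPLEMENTATION_DEFINED, RAW16]
 */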
8189
8190/*===========================================================================
8191 * FUNCTION : extractJpegMetadata
8192 *
8193 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
 *              JPEG metadata is cached in the HAL and returned as part of the
 *              capture result when metadata arrives from the camera daemon.
8196 *
8197 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8198 * @request: capture request
8199 *
8200 *==========================================================================*/
8201void QCamera3HardwareInterface::extractJpegMetadata(
8202 CameraMetadata& jpegMetadata,
8203 const camera3_capture_request_t *request)
8204{
8205 CameraMetadata frame_settings;
8206 frame_settings = request->settings;
8207
8208 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8209 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8210 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8211 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8212
8213 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8214 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8215 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8216 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8217
8218 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8219 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8220 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8221 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8222
8223 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8224 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8225 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8226 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8227
8228 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8229 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8230 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8231 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8232
8233 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8234 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8235 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8236 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8237
8238 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8239 int32_t thumbnail_size[2];
8240 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8241 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8242 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8243 int32_t orientation =
8244 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008245 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008246 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8247 int32_t temp;
8248 temp = thumbnail_size[0];
8249 thumbnail_size[0] = thumbnail_size[1];
8250 thumbnail_size[1] = temp;
8251 }
8252 }
8253 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8254 thumbnail_size,
8255 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8256 }
8257
8258}
8259
8260/*===========================================================================
8261 * FUNCTION : convertToRegions
8262 *
8263 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8264 *
8265 * PARAMETERS :
8266 * @rect : cam_rect_t struct to convert
8267 * @region : int32_t destination array
8268 * @weight : if we are converting from cam_area_t, weight is valid
8269 * else weight = -1
8270 *
8271 *==========================================================================*/
8272void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8273 int32_t *region, int weight)
8274{
8275 region[0] = rect.left;
8276 region[1] = rect.top;
8277 region[2] = rect.left + rect.width;
8278 region[3] = rect.top + rect.height;
8279 if (weight > -1) {
8280 region[4] = weight;
8281 }
8282}
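
/*
 * Worked example (illustrative only): a cam_rect_t of
 *   left = 100, top = 200, width = 640, height = 480, weight = 1
 * converts to the framework tuple
 *   region[] = {100, 200, 740, 680, 1}
 * i.e. {xmin, ymin, xmax, ymax, weight} with xmax = left + width and
 * ymax = top + height.
 */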
8283
8284/*===========================================================================
8285 * FUNCTION : convertFromRegions
8286 *
 * DESCRIPTION: helper method to convert a region tag from the capture request
 *              ([xmin, ymin, xmax, ymax, weight]) into a cam_area_t
 *
 * PARAMETERS :
 * @roi : cam_area_t to be populated
 * @frame_settings : capture request settings that contain the region tag
 * @tag : metadata tag holding the region array
8294 *
8295 *==========================================================================*/
8296void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008297 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008298{
Thierry Strudel3d639192016-09-09 11:52:26 -07008299 int32_t x_min = frame_settings.find(tag).data.i32[0];
8300 int32_t y_min = frame_settings.find(tag).data.i32[1];
8301 int32_t x_max = frame_settings.find(tag).data.i32[2];
8302 int32_t y_max = frame_settings.find(tag).data.i32[3];
8303 roi.weight = frame_settings.find(tag).data.i32[4];
8304 roi.rect.left = x_min;
8305 roi.rect.top = y_min;
8306 roi.rect.width = x_max - x_min;
8307 roi.rect.height = y_max - y_min;
8308}
8309
8310/*===========================================================================
8311 * FUNCTION : resetIfNeededROI
8312 *
 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
 *              returns false when the roi lies completely outside the crop region
8315 *
8316 * PARAMETERS :
8317 * @roi : cam_area_t struct to resize
8318 * @scalerCropRegion : cam_crop_region_t region to compare against
8319 *
8320 *
8321 *==========================================================================*/
8322bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8323 const cam_crop_region_t* scalerCropRegion)
8324{
8325 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8326 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8327 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8328 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8329
    /* According to the spec, weight = 0 indicates that the ROI must be disabled.
     * Without this check, the validation below (whether the ROI lies inside the
     * scaler crop region) would fail, the ROI would never be reset, and the
     * algorithm would keep using a stale ROI window.
     */
8335 if (roi->weight == 0) {
8336 return true;
8337 }
8338
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scaler crop's left edge
        (roi_y_max < scalerCropRegion->top) ||
        // bottom edge of roi window is above scaler crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond (right of) scaler crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scaler crop's bottom edge
8347 return false;
8348 }
8349 if (roi->rect.left < scalerCropRegion->left) {
8350 roi->rect.left = scalerCropRegion->left;
8351 }
8352 if (roi->rect.top < scalerCropRegion->top) {
8353 roi->rect.top = scalerCropRegion->top;
8354 }
8355 if (roi_x_max > crop_x_max) {
8356 roi_x_max = crop_x_max;
8357 }
8358 if (roi_y_max > crop_y_max) {
8359 roi_y_max = crop_y_max;
8360 }
8361 roi->rect.width = roi_x_max - roi->rect.left;
8362 roi->rect.height = roi_y_max - roi->rect.top;
8363 return true;
8364}
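
/*
 * Worked example (illustrative only): with a scaler crop region of
 *   (left = 0, top = 0, width = 2000, height = 1500)
 * and an ROI of
 *   (left = 1800, top = 900, width = 600, height = 600, weight = 1),
 * the ROI's right edge (2400) exceeds the crop's right edge (2000), so the ROI
 * is clamped to width = 200 (height stays 600) and the method returns true.
 * An ROI lying entirely outside the crop region would instead return false.
 */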
8365
8366/*===========================================================================
8367 * FUNCTION : convertLandmarks
8368 *
8369 * DESCRIPTION: helper method to extract the landmarks from face detection info
8370 *
8371 * PARAMETERS :
8372 * @landmark_data : input landmark data to be converted
8373 * @landmarks : int32_t destination array
8374 *
8375 *
8376 *==========================================================================*/
8377void QCamera3HardwareInterface::convertLandmarks(
8378 cam_face_landmarks_info_t landmark_data,
8379 int32_t *landmarks)
8380{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008381 if (landmark_data.is_left_eye_valid) {
8382 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8383 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8384 } else {
8385 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8386 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8387 }
8388
8389 if (landmark_data.is_right_eye_valid) {
8390 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8391 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8392 } else {
8393 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8394 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8395 }
8396
8397 if (landmark_data.is_mouth_valid) {
8398 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8399 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8400 } else {
8401 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8402 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8403 }
8404}
8405
8406/*===========================================================================
8407 * FUNCTION : setInvalidLandmarks
8408 *
8409 * DESCRIPTION: helper method to set invalid landmarks
8410 *
8411 * PARAMETERS :
8412 * @landmarks : int32_t destination array
8413 *
8414 *
8415 *==========================================================================*/
8416void QCamera3HardwareInterface::setInvalidLandmarks(
8417 int32_t *landmarks)
8418{
8419 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8420 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8421 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8422 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8423 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8424 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008425}
8426
8427#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008428
8429/*===========================================================================
8430 * FUNCTION : getCapabilities
8431 *
8432 * DESCRIPTION: query camera capability from back-end
8433 *
8434 * PARAMETERS :
8435 * @ops : mm-interface ops structure
8436 * @cam_handle : camera handle for which we need capability
8437 *
8438 * RETURN : ptr type of capability structure
8439 * capability for success
8440 * NULL for failure
8441 *==========================================================================*/
8442cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8443 uint32_t cam_handle)
8444{
8445 int rc = NO_ERROR;
8446 QCamera3HeapMemory *capabilityHeap = NULL;
8447 cam_capability_t *cap_ptr = NULL;
8448
8449 if (ops == NULL) {
8450 LOGE("Invalid arguments");
8451 return NULL;
8452 }
8453
8454 capabilityHeap = new QCamera3HeapMemory(1);
8455 if (capabilityHeap == NULL) {
8456 LOGE("creation of capabilityHeap failed");
8457 return NULL;
8458 }
8459
8460 /* Allocate memory for capability buffer */
8461 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8462 if(rc != OK) {
8463 LOGE("No memory for cappability");
8464 goto allocate_failed;
8465 }
8466
8467 /* Map memory for capability buffer */
8468 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8469
8470 rc = ops->map_buf(cam_handle,
8471 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8472 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8473 if(rc < 0) {
8474 LOGE("failed to map capability buffer");
8475 rc = FAILED_TRANSACTION;
8476 goto map_failed;
8477 }
8478
8479 /* Query Capability */
8480 rc = ops->query_capability(cam_handle);
8481 if(rc < 0) {
8482 LOGE("failed to query capability");
8483 rc = FAILED_TRANSACTION;
8484 goto query_failed;
8485 }
8486
8487 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8488 if (cap_ptr == NULL) {
8489 LOGE("out of memory");
8490 rc = NO_MEMORY;
8491 goto query_failed;
8492 }
8493
8494 memset(cap_ptr, 0, sizeof(cam_capability_t));
8495 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8496
8497 int index;
8498 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8499 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8500 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8501 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8502 }
8503
8504query_failed:
8505 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8506map_failed:
8507 capabilityHeap->deallocate();
8508allocate_failed:
8509 delete capabilityHeap;
8510
8511 if (rc != NO_ERROR) {
8512 return NULL;
8513 } else {
8514 return cap_ptr;
8515 }
8516}
8517
Thierry Strudel3d639192016-09-09 11:52:26 -07008518/*===========================================================================
8519 * FUNCTION : initCapabilities
8520 *
8521 * DESCRIPTION: initialize camera capabilities in static data struct
8522 *
8523 * PARAMETERS :
8524 * @cameraId : camera Id
8525 *
8526 * RETURN : int32_t type of status
8527 * NO_ERROR -- success
8528 * none-zero failure code
8529 *==========================================================================*/
8530int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8531{
8532 int rc = 0;
8533 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008534 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008535
8536 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8537 if (rc) {
8538 LOGE("camera_open failed. rc = %d", rc);
8539 goto open_failed;
8540 }
8541 if (!cameraHandle) {
8542 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8543 goto open_failed;
8544 }
8545
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008546 handle = get_main_camera_handle(cameraHandle->camera_handle);
8547 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8548 if (gCamCapability[cameraId] == NULL) {
8549 rc = FAILED_TRANSACTION;
8550 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008551 }
8552
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008553 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008554 if (is_dual_camera_by_idx(cameraId)) {
8555 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8556 gCamCapability[cameraId]->aux_cam_cap =
8557 getCapabilities(cameraHandle->ops, handle);
8558 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8559 rc = FAILED_TRANSACTION;
8560 free(gCamCapability[cameraId]);
8561 goto failed_op;
8562 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008563
8564 // Copy the main camera capability to main_cam_cap struct
8565 gCamCapability[cameraId]->main_cam_cap =
8566 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8567 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8568 LOGE("out of memory");
8569 rc = NO_MEMORY;
8570 goto failed_op;
8571 }
8572 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8573 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008574 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008575failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008576 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8577 cameraHandle = NULL;
8578open_failed:
8579 return rc;
8580}
8581
8582/*==========================================================================
 * FUNCTION : get3AVersion
8584 *
8585 * DESCRIPTION: get the Q3A S/W version
8586 *
8587 * PARAMETERS :
8588 * @sw_version: Reference of Q3A structure which will hold version info upon
8589 * return
8590 *
8591 * RETURN : None
8592 *
8593 *==========================================================================*/
8594void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8595{
8596 if(gCamCapability[mCameraId])
8597 sw_version = gCamCapability[mCameraId]->q3a_version;
8598 else
8599 LOGE("Capability structure NULL!");
8600}
8601
8602
8603/*===========================================================================
8604 * FUNCTION : initParameters
8605 *
8606 * DESCRIPTION: initialize camera parameters
8607 *
8608 * PARAMETERS :
8609 *
8610 * RETURN : int32_t type of status
8611 * NO_ERROR -- success
8612 * none-zero failure code
8613 *==========================================================================*/
8614int QCamera3HardwareInterface::initParameters()
8615{
8616 int rc = 0;
8617
8618 //Allocate Set Param Buffer
8619 mParamHeap = new QCamera3HeapMemory(1);
8620 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8621 if(rc != OK) {
8622 rc = NO_MEMORY;
8623 LOGE("Failed to allocate SETPARM Heap memory");
8624 delete mParamHeap;
8625 mParamHeap = NULL;
8626 return rc;
8627 }
8628
8629 //Map memory for parameters buffer
8630 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8631 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8632 mParamHeap->getFd(0),
8633 sizeof(metadata_buffer_t),
8634 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8635 if(rc < 0) {
8636 LOGE("failed to map SETPARM buffer");
8637 rc = FAILED_TRANSACTION;
8638 mParamHeap->deallocate();
8639 delete mParamHeap;
8640 mParamHeap = NULL;
8641 return rc;
8642 }
8643
8644 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8645
8646 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8647 return rc;
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : deinitParameters
8652 *
8653 * DESCRIPTION: de-initialize camera parameters
8654 *
8655 * PARAMETERS :
8656 *
8657 * RETURN : NONE
8658 *==========================================================================*/
8659void QCamera3HardwareInterface::deinitParameters()
8660{
8661 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8662 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8663
8664 mParamHeap->deallocate();
8665 delete mParamHeap;
8666 mParamHeap = NULL;
8667
8668 mParameters = NULL;
8669
8670 free(mPrevParameters);
8671 mPrevParameters = NULL;
8672}
8673
8674/*===========================================================================
8675 * FUNCTION : calcMaxJpegSize
8676 *
8677 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8678 *
 * PARAMETERS :
 * @camera_id : camera Id
 *
8681 * RETURN : max_jpeg_size
8682 *==========================================================================*/
8683size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8684{
8685 size_t max_jpeg_size = 0;
8686 size_t temp_width, temp_height;
8687 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8688 MAX_SIZES_CNT);
8689 for (size_t i = 0; i < count; i++) {
8690 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8691 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8692 if (temp_width * temp_height > max_jpeg_size ) {
8693 max_jpeg_size = temp_width * temp_height;
8694 }
8695 }
8696 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8697 return max_jpeg_size;
8698}
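
/*
 * Worked example (illustrative only): if the largest advertised picture size
 * were 4000x3000, then
 *   max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *                 = 18000000 bytes + the JPEG blob header,
 * i.e. the size of an uncompressed YUV420 frame is used as the worst-case
 * JPEG buffer size.
 */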
8699
8700/*===========================================================================
8701 * FUNCTION : getMaxRawSize
8702 *
8703 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8704 *
 * PARAMETERS :
 * @camera_id : camera Id
 *
8707 * RETURN : Largest supported Raw Dimension
8708 *==========================================================================*/
8709cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8710{
8711 int max_width = 0;
8712 cam_dimension_t maxRawSize;
8713
8714 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8715 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8716 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8717 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8718 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8719 }
8720 }
8721 return maxRawSize;
8722}
8723
8724
8725/*===========================================================================
8726 * FUNCTION : calcMaxJpegDim
8727 *
8728 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8729 *
8730 * PARAMETERS :
8731 *
8732 * RETURN : max_jpeg_dim
8733 *==========================================================================*/
8734cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8735{
8736 cam_dimension_t max_jpeg_dim;
8737 cam_dimension_t curr_jpeg_dim;
8738 max_jpeg_dim.width = 0;
8739 max_jpeg_dim.height = 0;
8740 curr_jpeg_dim.width = 0;
8741 curr_jpeg_dim.height = 0;
8742 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8743 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8744 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8745 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8746 max_jpeg_dim.width * max_jpeg_dim.height ) {
8747 max_jpeg_dim.width = curr_jpeg_dim.width;
8748 max_jpeg_dim.height = curr_jpeg_dim.height;
8749 }
8750 }
8751 return max_jpeg_dim;
8752}
8753
8754/*===========================================================================
8755 * FUNCTION : addStreamConfig
8756 *
8757 * DESCRIPTION: adds the stream configuration to the array
8758 *
8759 * PARAMETERS :
8760 * @available_stream_configs : pointer to stream configuration array
8761 * @scalar_format : scalar format
8762 * @dim : configuration dimension
8763 * @config_type : input or output configuration type
8764 *
8765 * RETURN : NONE
8766 *==========================================================================*/
8767void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8768 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8769{
8770 available_stream_configs.add(scalar_format);
8771 available_stream_configs.add(dim.width);
8772 available_stream_configs.add(dim.height);
8773 available_stream_configs.add(config_type);
8774}
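
/*
 * For example (illustrative only): a call such as
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB, {4000, 3000},
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 * appends the four values {format, width, height, direction} to the flat
 * array, which the framework later reads back in groups of four when parsing
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. The 4000x3000 size is only
 * an example dimension.
 */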
8775
8776/*===========================================================================
 * FUNCTION : supportBurstCapture
8778 *
8779 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8780 *
8781 * PARAMETERS :
8782 * @cameraId : camera Id
8783 *
8784 * RETURN : true if camera supports BURST_CAPTURE
8785 * false otherwise
8786 *==========================================================================*/
8787bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8788{
8789 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8790 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8791 const int32_t highResWidth = 3264;
8792 const int32_t highResHeight = 2448;
8793
8794 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8795 // Maximum resolution images cannot be captured at >= 10fps
8796 // -> not supporting BURST_CAPTURE
8797 return false;
8798 }
8799
8800 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8801 // Maximum resolution images can be captured at >= 20fps
8802 // --> supporting BURST_CAPTURE
8803 return true;
8804 }
8805
8806 // Find the smallest highRes resolution, or largest resolution if there is none
8807 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8808 MAX_SIZES_CNT);
8809 size_t highRes = 0;
8810 while ((highRes + 1 < totalCnt) &&
8811 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8812 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8813 highResWidth * highResHeight)) {
8814 highRes++;
8815 }
8816 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8817 return true;
8818 } else {
8819 return false;
8820 }
8821}
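
/*
 * Worked example (illustrative only): a sensor whose picture_min_duration[0]
 * is 33333333 ns (~33.3 ms, i.e. ~30 fps at full resolution) falls under the
 * 50 ms highResDurationBound, so BURST_CAPTURE is reported as supported
 * without consulting the per-size duration table.
 */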
8822
8823/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008824 * FUNCTION : getPDStatIndex
8825 *
8826 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8827 *
8828 * PARAMETERS :
8829 * @caps : camera capabilities
8830 *
8831 * RETURN : int32_t type
8832 * non-negative - on success
8833 * -1 - on failure
8834 *==========================================================================*/
8835int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8836 if (nullptr == caps) {
8837 return -1;
8838 }
8839
8840 uint32_t metaRawCount = caps->meta_raw_channel_count;
8841 int32_t ret = -1;
8842 for (size_t i = 0; i < metaRawCount; i++) {
8843 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8844 ret = i;
8845 break;
8846 }
8847 }
8848
8849 return ret;
8850}
8851
8852/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008853 * FUNCTION : initStaticMetadata
8854 *
8855 * DESCRIPTION: initialize the static metadata
8856 *
8857 * PARAMETERS :
8858 * @cameraId : camera Id
8859 *
8860 * RETURN : int32_t type of status
8861 * 0 -- success
8862 * non-zero failure code
8863 *==========================================================================*/
8864int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8865{
8866 int rc = 0;
8867 CameraMetadata staticInfo;
8868 size_t count = 0;
8869 bool limitedDevice = false;
8870 char prop[PROPERTY_VALUE_MAX];
8871 bool supportBurst = false;
8872
8873 supportBurst = supportBurstCapture(cameraId);
8874
    /* If the sensor is a YUV sensor (no raw support), or if per-frame control
     * is not guaranteed, or if the min fps at max resolution is less than
     * 20 fps, the device is advertised as a LIMITED device */
8878 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8879 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8880 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8881 !supportBurst;
8882
8883 uint8_t supportedHwLvl = limitedDevice ?
8884 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008885#ifndef USE_HAL_3_3
8886 // LEVEL_3 - This device will support level 3.
8887 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8888#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008889 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008890#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008891
8892 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8893 &supportedHwLvl, 1);
8894
8895 bool facingBack = false;
8896 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8897 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8898 facingBack = true;
8899 }
8900 /*HAL 3 only*/
8901 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8902 &gCamCapability[cameraId]->min_focus_distance, 1);
8903
8904 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8905 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8906
8907 /*should be using focal lengths but sensor doesn't provide that info now*/
8908 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8909 &gCamCapability[cameraId]->focal_length,
8910 1);
8911
8912 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8913 gCamCapability[cameraId]->apertures,
8914 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8915
8916 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8917 gCamCapability[cameraId]->filter_densities,
8918 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8919
8920
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008921 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8922 size_t mode_count =
8923 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8924 for (size_t i = 0; i < mode_count; i++) {
8925 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008927 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008928 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008929
8930 int32_t lens_shading_map_size[] = {
8931 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8932 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8933 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8934 lens_shading_map_size,
8935 sizeof(lens_shading_map_size)/sizeof(int32_t));
8936
8937 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8938 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8939
8940 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8941 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8942
8943 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8944 &gCamCapability[cameraId]->max_frame_duration, 1);
8945
8946 camera_metadata_rational baseGainFactor = {
8947 gCamCapability[cameraId]->base_gain_factor.numerator,
8948 gCamCapability[cameraId]->base_gain_factor.denominator};
8949 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8950 &baseGainFactor, 1);
8951
8952 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8953 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8954
8955 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8956 gCamCapability[cameraId]->pixel_array_size.height};
8957 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8958 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8959
8960 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8961 gCamCapability[cameraId]->active_array_size.top,
8962 gCamCapability[cameraId]->active_array_size.width,
8963 gCamCapability[cameraId]->active_array_size.height};
8964 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8965 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8966
8967 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8968 &gCamCapability[cameraId]->white_level, 1);
8969
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008970 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8971 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8972 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008973 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008974 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008975
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008976#ifndef USE_HAL_3_3
8977 bool hasBlackRegions = false;
8978 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8979 LOGW("black_region_count: %d is bounded to %d",
8980 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8981 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8982 }
8983 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8984 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8985 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8986 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8987 }
8988 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8989 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8990 hasBlackRegions = true;
8991 }
8992#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008993 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8994 &gCamCapability[cameraId]->flash_charge_duration, 1);
8995
8996 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8997 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8998
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008999 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9000 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9001 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009002 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9003 &timestampSource, 1);
9004
Thierry Strudel54dc9782017-02-15 12:12:10 -08009005 //update histogram vendor data
9006 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009007 &gCamCapability[cameraId]->histogram_size, 1);
9008
Thierry Strudel54dc9782017-02-15 12:12:10 -08009009 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009010 &gCamCapability[cameraId]->max_histogram_count, 1);
9011
Shuzhen Wang14415f52016-11-16 18:26:18 -08009012 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9013 //so that app can request fewer number of bins than the maximum supported.
9014 std::vector<int32_t> histBins;
9015 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9016 histBins.push_back(maxHistBins);
9017 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9018 (maxHistBins & 0x1) == 0) {
9019 histBins.push_back(maxHistBins >> 1);
9020 maxHistBins >>= 1;
9021 }
9022 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9023 histBins.data(), histBins.size());
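
    // For example (illustrative only): if max_histogram_count is 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE is 32 (assumed value), the advertised list
    // is {256, 128, 64, 32}, letting the app request any of those bin counts.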
9024
Thierry Strudel3d639192016-09-09 11:52:26 -07009025 int32_t sharpness_map_size[] = {
9026 gCamCapability[cameraId]->sharpness_map_size.width,
9027 gCamCapability[cameraId]->sharpness_map_size.height};
9028
9029 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9030 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9031
9032 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9033 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9034
Emilian Peev0f3c3162017-03-15 12:57:46 +00009035 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9036 if (0 <= indexPD) {
9037 // Advertise PD stats data as part of the Depth capabilities
9038 int32_t depthWidth =
9039 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9040 int32_t depthHeight =
9041 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9042 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9043 assert(0 < depthSamplesCount);
9044 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9045 &depthSamplesCount, 1);
9046
9047 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9048 depthHeight,
9049 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9050 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9051 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9052 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9053 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9054
9055 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9056 depthHeight, 33333333,
9057 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9058 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9059 depthMinDuration,
9060 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9061
9062 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9063 depthHeight, 0,
9064 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9065 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9066 depthStallDuration,
9067 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9068
9069 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9070 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9071 }
9072
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 int32_t scalar_formats[] = {
9074 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9075 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9076 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9077 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9078 HAL_PIXEL_FORMAT_RAW10,
9079 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009080 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9081 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9082 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009083
9084 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9085 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9086 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9087 count, MAX_SIZES_CNT, available_processed_sizes);
9088 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9089 available_processed_sizes, count * 2);
9090
9091 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9092 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9093 makeTable(gCamCapability[cameraId]->raw_dim,
9094 count, MAX_SIZES_CNT, available_raw_sizes);
9095 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9096 available_raw_sizes, count * 2);
9097
9098 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9099 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9100 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9101 count, MAX_SIZES_CNT, available_fps_ranges);
9102 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9103 available_fps_ranges, count * 2);
9104
9105 camera_metadata_rational exposureCompensationStep = {
9106 gCamCapability[cameraId]->exp_compensation_step.numerator,
9107 gCamCapability[cameraId]->exp_compensation_step.denominator};
9108 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9109 &exposureCompensationStep, 1);
9110
9111 Vector<uint8_t> availableVstabModes;
9112 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9113 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009114 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009115 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009116 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009117 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009118 count = IS_TYPE_MAX;
9119 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9120 for (size_t i = 0; i < count; i++) {
9121 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9122 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9123 eisSupported = true;
9124 break;
9125 }
9126 }
9127 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009128 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9129 }
9130 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9131 availableVstabModes.array(), availableVstabModes.size());
9132
9133 /*HAL 1 and HAL 3 common*/
9134 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9135 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9136 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
9138 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9139 &maxZoom, 1);
9140
9141 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9142 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9143
9144 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9145 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9146 max3aRegions[2] = 0; /* AF not supported */
9147 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9148 max3aRegions, 3);
9149
9150 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9151 memset(prop, 0, sizeof(prop));
9152 property_get("persist.camera.facedetect", prop, "1");
9153 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9154 LOGD("Support face detection mode: %d",
9155 supportedFaceDetectMode);
9156
9157 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009158 /* support mode should be OFF if max number of face is 0 */
9159 if (maxFaces <= 0) {
9160 supportedFaceDetectMode = 0;
9161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009162 Vector<uint8_t> availableFaceDetectModes;
9163 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9164 if (supportedFaceDetectMode == 1) {
9165 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9166 } else if (supportedFaceDetectMode == 2) {
9167 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9168 } else if (supportedFaceDetectMode == 3) {
9169 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9170 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9171 } else {
9172 maxFaces = 0;
9173 }
9174 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9175 availableFaceDetectModes.array(),
9176 availableFaceDetectModes.size());
9177 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9178 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009179 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9180 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9181 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009182
9183 int32_t exposureCompensationRange[] = {
9184 gCamCapability[cameraId]->exposure_compensation_min,
9185 gCamCapability[cameraId]->exposure_compensation_max};
9186 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9187 exposureCompensationRange,
9188 sizeof(exposureCompensationRange)/sizeof(int32_t));
9189
9190 uint8_t lensFacing = (facingBack) ?
9191 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9192 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9193
9194 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9195 available_thumbnail_sizes,
9196 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9197
9198 /*all sizes will be clubbed into this tag*/
9199 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9200 /*android.scaler.availableStreamConfigurations*/
9201 Vector<int32_t> available_stream_configs;
9202 cam_dimension_t active_array_dim;
9203 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9204 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009205
    /* Advertise the list of supported input dimensions based on the property
       below. By default, all sizes up to 5MP will be advertised.
       Note that the setprop resolution format should be WxH,
       e.g.: adb shell setprop persist.camera.input.minsize 1280x720
       To list all supported sizes, set the property to "0x0". */
9211 cam_dimension_t minInputSize = {2592,1944}; //5MP
9212 memset(prop, 0, sizeof(prop));
9213 property_get("persist.camera.input.minsize", prop, "2592x1944");
9214 if (strlen(prop) > 0) {
9215 char *saveptr = NULL;
9216 char *token = strtok_r(prop, "x", &saveptr);
9217 if (token != NULL) {
9218 minInputSize.width = atoi(token);
9219 }
9220 token = strtok_r(NULL, "x", &saveptr);
9221 if (token != NULL) {
9222 minInputSize.height = atoi(token);
9223 }
9224 }
9225
Thierry Strudel3d639192016-09-09 11:52:26 -07009226 /* Add input/output stream configurations for each scalar formats*/
9227 for (size_t j = 0; j < scalar_formats_count; j++) {
9228 switch (scalar_formats[j]) {
9229 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9230 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9231 case HAL_PIXEL_FORMAT_RAW10:
9232 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9233 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9234 addStreamConfig(available_stream_configs, scalar_formats[j],
9235 gCamCapability[cameraId]->raw_dim[i],
9236 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9237 }
9238 break;
9239 case HAL_PIXEL_FORMAT_BLOB:
9240 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9241 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9242 addStreamConfig(available_stream_configs, scalar_formats[j],
9243 gCamCapability[cameraId]->picture_sizes_tbl[i],
9244 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9245 }
9246 break;
9247 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9248 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9249 default:
9250 cam_dimension_t largest_picture_size;
9251 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9252 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9253 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9254 addStreamConfig(available_stream_configs, scalar_formats[j],
9255 gCamCapability[cameraId]->picture_sizes_tbl[i],
9256 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* For the below 2 formats we also support input streams for
                 * reprocessing; advertise those */
9258 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9259 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9260 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9261 >= minInputSize.width) || (gCamCapability[cameraId]->
9262 picture_sizes_tbl[i].height >= minInputSize.height)) {
9263 addStreamConfig(available_stream_configs, scalar_formats[j],
9264 gCamCapability[cameraId]->picture_sizes_tbl[i],
9265 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9266 }
9267 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009268 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009269
Thierry Strudel3d639192016-09-09 11:52:26 -07009270 break;
9271 }
9272 }
9273
9274 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9275 available_stream_configs.array(), available_stream_configs.size());
9276 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9277 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9278
9279 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9280 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9281
9282 /* android.scaler.availableMinFrameDurations */
9283 Vector<int64_t> available_min_durations;
9284 for (size_t j = 0; j < scalar_formats_count; j++) {
9285 switch (scalar_formats[j]) {
9286 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9287 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9288 case HAL_PIXEL_FORMAT_RAW10:
9289 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9290 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9291 available_min_durations.add(scalar_formats[j]);
9292 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9293 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9294 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9295 }
9296 break;
9297 default:
9298 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9299 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9300 available_min_durations.add(scalar_formats[j]);
9301 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9302 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9303 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9304 }
9305 break;
9306 }
9307 }
9308 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9309 available_min_durations.array(), available_min_durations.size());
9310
9311 Vector<int32_t> available_hfr_configs;
9312 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9313 int32_t fps = 0;
9314 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9315 case CAM_HFR_MODE_60FPS:
9316 fps = 60;
9317 break;
9318 case CAM_HFR_MODE_90FPS:
9319 fps = 90;
9320 break;
9321 case CAM_HFR_MODE_120FPS:
9322 fps = 120;
9323 break;
9324 case CAM_HFR_MODE_150FPS:
9325 fps = 150;
9326 break;
9327 case CAM_HFR_MODE_180FPS:
9328 fps = 180;
9329 break;
9330 case CAM_HFR_MODE_210FPS:
9331 fps = 210;
9332 break;
9333 case CAM_HFR_MODE_240FPS:
9334 fps = 240;
9335 break;
9336 case CAM_HFR_MODE_480FPS:
9337 fps = 480;
9338 break;
9339 case CAM_HFR_MODE_OFF:
9340 case CAM_HFR_MODE_MAX:
9341 default:
9342 break;
9343 }
9344
9345 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9346 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9347 /* For each HFR frame rate, we need to advertise one variable fps range
9348 * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise [30, 120]
9349 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9350 * set by the app. When video recording is started, [120, 120] is
9351 * set. This way sensor configuration does not change when recording
9352 * is started */
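 /* Illustrative example (hypothetical dimension): a 1920x1080 entry in the
 * HFR table at 120 FPS would contribute the tuples
 * (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4),
 * assuming PREVIEW_FPS_FOR_HFR is 30 so batch_size_max = 120 / 30 = 4. */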
9353
9354 /* (width, height, fps_min, fps_max, batch_size_max) */
9355 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9356 j < MAX_SIZES_CNT; j++) {
9357 available_hfr_configs.add(
9358 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9359 available_hfr_configs.add(
9360 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9361 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9362 available_hfr_configs.add(fps);
9363 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9364
9365 /* (width, height, fps_min, fps_max, batch_size_max) */
9366 available_hfr_configs.add(
9367 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9368 available_hfr_configs.add(
9369 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9370 available_hfr_configs.add(fps);
9371 available_hfr_configs.add(fps);
9372 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9373 }
9374 }
9375 }
9376 //Advertise HFR capability only if the property is set
9377 memset(prop, 0, sizeof(prop));
9378 property_get("persist.camera.hal3hfr.enable", prop, "1");
9379 uint8_t hfrEnable = (uint8_t)atoi(prop);
9380
9381 if(hfrEnable && available_hfr_configs.array()) {
9382 staticInfo.update(
9383 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9384 available_hfr_configs.array(), available_hfr_configs.size());
9385 }
9386
9387 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9388 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9389 &max_jpeg_size, 1);
9390
9391 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9392 size_t size = 0;
9393 count = CAM_EFFECT_MODE_MAX;
9394 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9395 for (size_t i = 0; i < count; i++) {
9396 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9397 gCamCapability[cameraId]->supported_effects[i]);
9398 if (NAME_NOT_FOUND != val) {
9399 avail_effects[size] = (uint8_t)val;
9400 size++;
9401 }
9402 }
9403 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9404 avail_effects,
9405 size);
9406
9407 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9408 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9409 size_t supported_scene_modes_cnt = 0;
9410 count = CAM_SCENE_MODE_MAX;
9411 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9412 for (size_t i = 0; i < count; i++) {
9413 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9414 CAM_SCENE_MODE_OFF) {
9415 int val = lookupFwkName(SCENE_MODES_MAP,
9416 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9417 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009418
Thierry Strudel3d639192016-09-09 11:52:26 -07009419 if (NAME_NOT_FOUND != val) {
9420 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9421 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9422 supported_scene_modes_cnt++;
9423 }
9424 }
9425 }
9426 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9427 avail_scene_modes,
9428 supported_scene_modes_cnt);
9429
9430 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9431 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9432 supported_scene_modes_cnt,
9433 CAM_SCENE_MODE_MAX,
9434 scene_mode_overrides,
9435 supported_indexes,
9436 cameraId);
9437
9438 if (supported_scene_modes_cnt == 0) {
9439 supported_scene_modes_cnt = 1;
9440 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9441 }
9442
9443 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9444 scene_mode_overrides, supported_scene_modes_cnt * 3);
9445
9446 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9447 ANDROID_CONTROL_MODE_AUTO,
9448 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9449 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9450 available_control_modes,
9451 3);
9452
9453 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9454 size = 0;
9455 count = CAM_ANTIBANDING_MODE_MAX;
9456 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9457 for (size_t i = 0; i < count; i++) {
9458 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9459 gCamCapability[cameraId]->supported_antibandings[i]);
9460 if (NAME_NOT_FOUND != val) {
9461 avail_antibanding_modes[size] = (uint8_t)val;
9462 size++;
9463 }
9464
9465 }
9466 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9467 avail_antibanding_modes,
9468 size);
9469
9470 uint8_t avail_abberation_modes[] = {
9471 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9472 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9473 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9474 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9475 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9476 if (0 == count) {
9477 // If no aberration correction modes are available for a device, advertise only the OFF mode
9478 size = 1;
9479 } else {
9480 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9481 // So, advertise all 3 modes if at least one mode is supported, as per the
9482 // new M requirement
9483 size = 3;
9484 }
9485 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9486 avail_abberation_modes,
9487 size);
9488
9489 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9490 size = 0;
9491 count = CAM_FOCUS_MODE_MAX;
9492 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9493 for (size_t i = 0; i < count; i++) {
9494 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9495 gCamCapability[cameraId]->supported_focus_modes[i]);
9496 if (NAME_NOT_FOUND != val) {
9497 avail_af_modes[size] = (uint8_t)val;
9498 size++;
9499 }
9500 }
9501 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9502 avail_af_modes,
9503 size);
9504
9505 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9506 size = 0;
9507 count = CAM_WB_MODE_MAX;
9508 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9509 for (size_t i = 0; i < count; i++) {
9510 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9511 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9512 gCamCapability[cameraId]->supported_white_balances[i]);
9513 if (NAME_NOT_FOUND != val) {
9514 avail_awb_modes[size] = (uint8_t)val;
9515 size++;
9516 }
9517 }
9518 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9519 avail_awb_modes,
9520 size);
9521
9522 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9523 count = CAM_FLASH_FIRING_LEVEL_MAX;
9524 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9525 count);
9526 for (size_t i = 0; i < count; i++) {
9527 available_flash_levels[i] =
9528 gCamCapability[cameraId]->supported_firing_levels[i];
9529 }
9530 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9531 available_flash_levels, count);
9532
9533 uint8_t flashAvailable;
9534 if (gCamCapability[cameraId]->flash_available)
9535 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9536 else
9537 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9538 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9539 &flashAvailable, 1);
9540
9541 Vector<uint8_t> avail_ae_modes;
9542 count = CAM_AE_MODE_MAX;
9543 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9544 for (size_t i = 0; i < count; i++) {
9545 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9546 }
9547 if (flashAvailable) {
9548 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9549 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009550 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009551 }
9552 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9553 avail_ae_modes.array(),
9554 avail_ae_modes.size());
9555
9556 int32_t sensitivity_range[2];
9557 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9558 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9559 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9560 sensitivity_range,
9561 sizeof(sensitivity_range) / sizeof(int32_t));
9562
9563 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9564 &gCamCapability[cameraId]->max_analog_sensitivity,
9565 1);
9566
9567 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9568 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9569 &sensor_orientation,
9570 1);
9571
9572 int32_t max_output_streams[] = {
9573 MAX_STALLING_STREAMS,
9574 MAX_PROCESSED_STREAMS,
9575 MAX_RAW_STREAMS};
9576 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9577 max_output_streams,
9578 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9579
9580 uint8_t avail_leds = 0;
9581 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9582 &avail_leds, 0);
9583
9584 uint8_t focus_dist_calibrated;
9585 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9586 gCamCapability[cameraId]->focus_dist_calibrated);
9587 if (NAME_NOT_FOUND != val) {
9588 focus_dist_calibrated = (uint8_t)val;
9589 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9590 &focus_dist_calibrated, 1);
9591 }
9592
9593 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9594 size = 0;
9595 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9596 MAX_TEST_PATTERN_CNT);
9597 for (size_t i = 0; i < count; i++) {
9598 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9599 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9600 if (NAME_NOT_FOUND != testpatternMode) {
9601 avail_testpattern_modes[size] = testpatternMode;
9602 size++;
9603 }
9604 }
9605 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9606 avail_testpattern_modes,
9607 size);
9608
9609 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9610 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9611 &max_pipeline_depth,
9612 1);
9613
9614 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9615 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9616 &partial_result_count,
9617 1);
9618
9619 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9620 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9621
9622 Vector<uint8_t> available_capabilities;
9623 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9624 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9625 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9626 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9627 if (supportBurst) {
9628 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9629 }
9630 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9631 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9632 if (hfrEnable && available_hfr_configs.array()) {
9633 available_capabilities.add(
9634 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9635 }
9636
9637 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9638 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9639 }
9640 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9641 available_capabilities.array(),
9642 available_capabilities.size());
9643
9644 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9645 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9646 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9647 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9648
9649 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9650 &aeLockAvailable, 1);
9651
9652 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9653 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9654 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9655 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9656
9657 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9658 &awbLockAvailable, 1);
9659
9660 int32_t max_input_streams = 1;
9661 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9662 &max_input_streams,
9663 1);
9664
9665 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9666 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9667 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9668 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9669 HAL_PIXEL_FORMAT_YCbCr_420_888};
9670 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9671 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9672
9673 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9674 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9675 &max_latency,
9676 1);
9677
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009678#ifndef USE_HAL_3_3
9679 int32_t isp_sensitivity_range[2];
9680 isp_sensitivity_range[0] =
9681 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9682 isp_sensitivity_range[1] =
9683 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9684 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9685 isp_sensitivity_range,
9686 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9687#endif
9688
Thierry Strudel3d639192016-09-09 11:52:26 -07009689 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9690 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9691 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9692 available_hot_pixel_modes,
9693 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9694
9695 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9696 ANDROID_SHADING_MODE_FAST,
9697 ANDROID_SHADING_MODE_HIGH_QUALITY};
9698 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9699 available_shading_modes,
9700 3);
9701
9702 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9703 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9704 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9705 available_lens_shading_map_modes,
9706 2);
9707
9708 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9709 ANDROID_EDGE_MODE_FAST,
9710 ANDROID_EDGE_MODE_HIGH_QUALITY,
9711 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9712 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9713 available_edge_modes,
9714 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9715
9716 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9717 ANDROID_NOISE_REDUCTION_MODE_FAST,
9718 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9719 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9720 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9721 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9722 available_noise_red_modes,
9723 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9724
9725 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9726 ANDROID_TONEMAP_MODE_FAST,
9727 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9728 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9729 available_tonemap_modes,
9730 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9731
9732 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9733 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9734 available_hot_pixel_map_modes,
9735 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9736
9737 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9738 gCamCapability[cameraId]->reference_illuminant1);
9739 if (NAME_NOT_FOUND != val) {
9740 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9741 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9742 }
9743
9744 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9745 gCamCapability[cameraId]->reference_illuminant2);
9746 if (NAME_NOT_FOUND != val) {
9747 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9748 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9749 }
9750
9751 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9752 (void *)gCamCapability[cameraId]->forward_matrix1,
9753 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9754
9755 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9756 (void *)gCamCapability[cameraId]->forward_matrix2,
9757 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9758
9759 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9760 (void *)gCamCapability[cameraId]->color_transform1,
9761 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9762
9763 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9764 (void *)gCamCapability[cameraId]->color_transform2,
9765 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9766
9767 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9768 (void *)gCamCapability[cameraId]->calibration_transform1,
9769 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9770
9771 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9772 (void *)gCamCapability[cameraId]->calibration_transform2,
9773 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9774
9775 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9776 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9777 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9778 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9779 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9780 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9781 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9782 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9783 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9784 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9785 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9786 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9787 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9788 ANDROID_JPEG_GPS_COORDINATES,
9789 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9790 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9791 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9792 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9793 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9794 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9795 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9796 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9797 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9798 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009799#ifndef USE_HAL_3_3
9800 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9801#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009802 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009803 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009804 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9805 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009806 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009807 /* DevCamDebug metadata request_keys_basic */
9808 DEVCAMDEBUG_META_ENABLE,
9809 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009810 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9811 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009812 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009813
9814 size_t request_keys_cnt =
9815 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9816 Vector<int32_t> available_request_keys;
9817 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9818 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9819 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9820 }
9821
9822 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9823 available_request_keys.array(), available_request_keys.size());
9824
9825 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9826 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9827 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9828 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9829 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9830 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9831 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9832 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9833 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9834 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9835 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9836 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9837 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9838 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9839 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9840 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9841 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009842 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009843 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9844 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9845 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009846 ANDROID_STATISTICS_FACE_SCORES,
9847#ifndef USE_HAL_3_3
9848 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9849#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009850 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009851 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009852 // DevCamDebug metadata result_keys_basic
9853 DEVCAMDEBUG_META_ENABLE,
9854 // DevCamDebug metadata result_keys AF
9855 DEVCAMDEBUG_AF_LENS_POSITION,
9856 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9857 DEVCAMDEBUG_AF_TOF_DISTANCE,
9858 DEVCAMDEBUG_AF_LUMA,
9859 DEVCAMDEBUG_AF_HAF_STATE,
9860 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9861 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9862 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9863 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9864 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9865 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9866 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9867 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9868 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9869 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9870 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9871 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9872 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9873 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9874 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9875 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9876 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9877 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9878 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9879 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9880 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9881 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9882 // DevCamDebug metadata result_keys AEC
9883 DEVCAMDEBUG_AEC_TARGET_LUMA,
9884 DEVCAMDEBUG_AEC_COMP_LUMA,
9885 DEVCAMDEBUG_AEC_AVG_LUMA,
9886 DEVCAMDEBUG_AEC_CUR_LUMA,
9887 DEVCAMDEBUG_AEC_LINECOUNT,
9888 DEVCAMDEBUG_AEC_REAL_GAIN,
9889 DEVCAMDEBUG_AEC_EXP_INDEX,
9890 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009891 // DevCamDebug metadata result_keys zzHDR
9892 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9893 DEVCAMDEBUG_AEC_L_LINECOUNT,
9894 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9895 DEVCAMDEBUG_AEC_S_LINECOUNT,
9896 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9897 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9898 // DevCamDebug metadata result_keys ADRC
9899 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9900 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9901 DEVCAMDEBUG_AEC_GTM_RATIO,
9902 DEVCAMDEBUG_AEC_LTM_RATIO,
9903 DEVCAMDEBUG_AEC_LA_RATIO,
9904 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009905 // DevCamDebug metadata result_keys AWB
9906 DEVCAMDEBUG_AWB_R_GAIN,
9907 DEVCAMDEBUG_AWB_G_GAIN,
9908 DEVCAMDEBUG_AWB_B_GAIN,
9909 DEVCAMDEBUG_AWB_CCT,
9910 DEVCAMDEBUG_AWB_DECISION,
9911 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009912 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9913 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9914 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009915 };
9916
Thierry Strudel3d639192016-09-09 11:52:26 -07009917 size_t result_keys_cnt =
9918 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9919
9920 Vector<int32_t> available_result_keys;
9921 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9922 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9923 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9924 }
9925 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9926 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9927 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9928 }
9929 if (supportedFaceDetectMode == 1) {
9930 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9931 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9932 } else if ((supportedFaceDetectMode == 2) ||
9933 (supportedFaceDetectMode == 3)) {
9934 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9935 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9936 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009937#ifndef USE_HAL_3_3
9938 if (hasBlackRegions) {
9939 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9940 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9941 }
9942#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009943 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9944 available_result_keys.array(), available_result_keys.size());
9945
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009946 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009947 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9948 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9949 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9950 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9951 ANDROID_SCALER_CROPPING_TYPE,
9952 ANDROID_SYNC_MAX_LATENCY,
9953 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9954 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9955 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9956 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9957 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9958 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9959 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9960 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9961 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9962 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9963 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9964 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9965 ANDROID_LENS_FACING,
9966 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9967 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9968 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9969 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9970 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9971 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9972 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9973 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9974 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9975 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9976 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9977 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9978 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9979 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9980 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9981 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9982 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9983 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9984 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9985 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009986 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009987 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9988 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9989 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9990 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9991 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9992 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9993 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9994 ANDROID_CONTROL_AVAILABLE_MODES,
9995 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9996 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9997 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9998 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009999 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10000#ifndef USE_HAL_3_3
10001 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10002 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10003#endif
10004 };
10005
10006 Vector<int32_t> available_characteristics_keys;
10007 available_characteristics_keys.appendArray(characteristics_keys_basic,
10008 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10009#ifndef USE_HAL_3_3
10010 if (hasBlackRegions) {
10011 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10012 }
10013#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010014
10015 if (0 <= indexPD) {
10016 int32_t depthKeys[] = {
10017 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10018 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10019 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10020 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10021 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10022 };
10023 available_characteristics_keys.appendArray(depthKeys,
10024 sizeof(depthKeys) / sizeof(depthKeys[0]));
10025 }
10026
Thierry Strudel3d639192016-09-09 11:52:26 -070010027 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010028 available_characteristics_keys.array(),
10029 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010030
10031 /*available stall durations depend on the hw + sw and will be different for different devices */
10032 /*have to add for raw after implementation*/
10033 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10034 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10035
10036 Vector<int64_t> available_stall_durations;
10037 for (uint32_t j = 0; j < stall_formats_count; j++) {
10038 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10039 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10040 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10041 available_stall_durations.add(stall_formats[j]);
10042 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10043 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10044 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10045 }
10046 } else {
10047 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10048 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10049 available_stall_durations.add(stall_formats[j]);
10050 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10051 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10052 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10053 }
10054 }
10055 }
10056 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10057 available_stall_durations.array(),
10058 available_stall_durations.size());
10059
10060 //QCAMERA3_OPAQUE_RAW
10061 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10062 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10063 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10064 case LEGACY_RAW:
10065 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10066 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10067 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10068 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10069 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10070 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10071 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10072 break;
10073 case MIPI_RAW:
10074 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10075 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10076 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10077 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10078 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10079 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10080 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10081 break;
10082 default:
10083 LOGE("unknown opaque_raw_format %d",
10084 gCamCapability[cameraId]->opaque_raw_fmt);
10085 break;
10086 }
10087 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10088
10089 Vector<int32_t> strides;
10090 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10091 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10092 cam_stream_buf_plane_info_t buf_planes;
10093 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10094 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10095 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10096 &gCamCapability[cameraId]->padding_info, &buf_planes);
10097 strides.add(buf_planes.plane_info.mp[0].stride);
10098 }
10099 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10100 strides.size());
10101
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010102 //TBD: remove the following line once backend advertises zzHDR in feature mask
10103 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010104 //Video HDR default
10105 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10106 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010107 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010108 int32_t vhdr_mode[] = {
10109 QCAMERA3_VIDEO_HDR_MODE_OFF,
10110 QCAMERA3_VIDEO_HDR_MODE_ON};
10111
10112 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10113 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10114 vhdr_mode, vhdr_mode_count);
10115 }
10116
Thierry Strudel3d639192016-09-09 11:52:26 -070010117 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10118 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10119 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10120
10121 uint8_t isMonoOnly =
10122 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10123 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10124 &isMonoOnly, 1);
10125
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010126#ifndef USE_HAL_3_3
10127 Vector<int32_t> opaque_size;
10128 for (size_t j = 0; j < scalar_formats_count; j++) {
10129 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10130 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10131 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10132 cam_stream_buf_plane_info_t buf_planes;
10133
10134 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10135 &gCamCapability[cameraId]->padding_info, &buf_planes);
10136
10137 if (rc == 0) {
10138 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10139 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10140 opaque_size.add(buf_planes.plane_info.frame_len);
10141 } else {
10142 LOGE("raw frame calculation failed!");
10143 }
10144 }
10145 }
10146 }
10147
10148 if ((opaque_size.size() > 0) &&
10149 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10150 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10151 else
10152 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10153#endif
10154
Thierry Strudel04e026f2016-10-10 11:27:36 -070010155 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10156 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10157 size = 0;
10158 count = CAM_IR_MODE_MAX;
10159 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10160 for (size_t i = 0; i < count; i++) {
10161 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10162 gCamCapability[cameraId]->supported_ir_modes[i]);
10163 if (NAME_NOT_FOUND != val) {
10164 avail_ir_modes[size] = (int32_t)val;
10165 size++;
10166 }
10167 }
10168 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10169 avail_ir_modes, size);
10170 }
10171
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010172 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10173 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10174 size = 0;
10175 count = CAM_AEC_CONVERGENCE_MAX;
10176 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10177 for (size_t i = 0; i < count; i++) {
10178 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10179 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10180 if (NAME_NOT_FOUND != val) {
10181 available_instant_aec_modes[size] = (int32_t)val;
10182 size++;
10183 }
10184 }
10185 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10186 available_instant_aec_modes, size);
10187 }
10188
Thierry Strudel54dc9782017-02-15 12:12:10 -080010189 int32_t sharpness_range[] = {
10190 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10191 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10192 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10193
10194 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10195 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10196 size = 0;
10197 count = CAM_BINNING_CORRECTION_MODE_MAX;
10198 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10199 for (size_t i = 0; i < count; i++) {
10200 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10201 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10202 gCamCapability[cameraId]->supported_binning_modes[i]);
10203 if (NAME_NOT_FOUND != val) {
10204 avail_binning_modes[size] = (int32_t)val;
10205 size++;
10206 }
10207 }
10208 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10209 avail_binning_modes, size);
10210 }
10211
10212 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10213 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10214 size = 0;
10215 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10216 for (size_t i = 0; i < count; i++) {
10217 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10218 gCamCapability[cameraId]->supported_aec_modes[i]);
10219 if (NAME_NOT_FOUND != val)
10220 available_aec_modes[size++] = val;
10221 }
10222 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10223 available_aec_modes, size);
10224 }
10225
10226 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10227 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10228 size = 0;
10229 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10230 for (size_t i = 0; i < count; i++) {
10231 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10232 gCamCapability[cameraId]->supported_iso_modes[i]);
10233 if (NAME_NOT_FOUND != val)
10234 available_iso_modes[size++] = val;
10235 }
10236 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10237 available_iso_modes, size);
10238 }
10239
10240 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10241 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10242 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10243 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10244 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10245
10246 int32_t available_saturation_range[4];
10247 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10248 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10249 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10250 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10251 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10252 available_saturation_range, 4);
10253
10254 uint8_t is_hdr_values[2];
10255 is_hdr_values[0] = 0;
10256 is_hdr_values[1] = 1;
10257 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10258 is_hdr_values, 2);
10259
10260 float is_hdr_confidence_range[2];
10261 is_hdr_confidence_range[0] = 0.0;
10262 is_hdr_confidence_range[1] = 1.0;
10263 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10264 is_hdr_confidence_range, 2);
10265
Emilian Peev0a972ef2017-03-16 10:25:53 +000010266 size_t eepromLength = strnlen(
10267 reinterpret_cast<const char *>(
10268 gCamCapability[cameraId]->eeprom_version_info),
10269 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10270 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010271 char easelInfo[] = ",E:N";
10272 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10273 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10274 eepromLength += sizeof(easelInfo);
10275 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10276 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010277 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10278 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10279 }
10280
Thierry Strudel3d639192016-09-09 11:52:26 -070010281 gStaticMetadata[cameraId] = staticInfo.release();
10282 return rc;
10283}
10284
10285/*===========================================================================
10286 * FUNCTION : makeTable
10287 *
10288 * DESCRIPTION: make a table of sizes
10289 *
10290 * PARAMETERS :
10291 *
10292 *
10293 *==========================================================================*/
10294void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10295 size_t max_size, int32_t *sizeTable)
10296{
10297 size_t j = 0;
10298 if (size > max_size) {
10299 size = max_size;
10300 }
10301 for (size_t i = 0; i < size; i++) {
10302 sizeTable[j] = dimTable[i].width;
10303 sizeTable[j+1] = dimTable[i].height;
10304 j+=2;
10305 }
10306}
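/* A minimal usage sketch (hypothetical values): given dimTable = {{1920,1080},{1280,720}}
 * and size = 2, makeTable flattens the entries into
 * sizeTable = {1920, 1080, 1280, 720}, i.e. width/height pairs laid out consecutively. */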
10307
10308/*===========================================================================
10309 * FUNCTION : makeFPSTable
10310 *
10311 * DESCRIPTION: make a table of fps ranges
10312 *
10313 * PARAMETERS :
10314 *
10315 *==========================================================================*/
10316void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10317 size_t max_size, int32_t *fpsRangesTable)
10318{
10319 size_t j = 0;
10320 if (size > max_size) {
10321 size = max_size;
10322 }
10323 for (size_t i = 0; i < size; i++) {
10324 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10325 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10326 j+=2;
10327 }
10328}
10329
10330/*===========================================================================
10331 * FUNCTION : makeOverridesList
10332 *
10333 * DESCRIPTION: make a list of scene mode overrides
10334 *
10335 * PARAMETERS :
10336 *
10337 *
10338 *==========================================================================*/
10339void QCamera3HardwareInterface::makeOverridesList(
10340 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10341 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10342{
10343 /*daemon will give a list of overrides for all scene modes.
10344 However we should send the framework only the overrides for the scene modes
10345 it supports*/
10346 size_t j = 0;
10347 if (size > max_size) {
10348 size = max_size;
10349 }
10350 size_t focus_count = CAM_FOCUS_MODE_MAX;
10351 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10352 focus_count);
10353 for (size_t i = 0; i < size; i++) {
10354 bool supt = false;
10355 size_t index = supported_indexes[i];
10356 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10357 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10358 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10359 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10360 overridesTable[index].awb_mode);
10361 if (NAME_NOT_FOUND != val) {
10362 overridesList[j+1] = (uint8_t)val;
10363 }
10364 uint8_t focus_override = overridesTable[index].af_mode;
10365 for (size_t k = 0; k < focus_count; k++) {
10366 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10367 supt = true;
10368 break;
10369 }
10370 }
10371 if (supt) {
10372 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10373 focus_override);
10374 if (NAME_NOT_FOUND != val) {
10375 overridesList[j+2] = (uint8_t)val;
10376 }
10377 } else {
10378 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10379 }
10380 j+=3;
10381 }
10382}
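/* Illustrative layout (hypothetical values): each supported scene mode contributes one
 * (AE, AWB, AF) triple to overridesList, e.g.
 * { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
 *   ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, ... },
 * matching the 3-entries-per-scene-mode layout of ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */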
10383
10384/*===========================================================================
10385 * FUNCTION : filterJpegSizes
10386 *
10387 * DESCRIPTION: Returns the supported JPEG sizes, based on the maximum dimension
10388 * that the active array size could be downscaled to
10389 *
10390 * PARAMETERS :
10391 *
10392 * RETURN : length of jpegSizes array
10393 *==========================================================================*/
10394
10395size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10396 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10397 uint8_t downscale_factor)
10398{
10399 if (0 == downscale_factor) {
10400 downscale_factor = 1;
10401 }
10402
10403 int32_t min_width = active_array_size.width / downscale_factor;
10404 int32_t min_height = active_array_size.height / downscale_factor;
10405 size_t jpegSizesCnt = 0;
10406 if (processedSizesCnt > maxCount) {
10407 processedSizesCnt = maxCount;
10408 }
10409 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10410 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10411 jpegSizes[jpegSizesCnt] = processedSizes[i];
10412 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10413 jpegSizesCnt += 2;
10414 }
10415 }
10416 return jpegSizesCnt;
10417}
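/* Worked example (hypothetical numbers): with an active array of 4000x3000 and
 * downscale_factor = 2, min_width/min_height become 2000/1500, so a processed size of
 * 1920x1080 is dropped while 2048x1536 and larger sizes are kept as JPEG sizes. */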
10418
10419/*===========================================================================
10420 * FUNCTION : computeNoiseModelEntryS
10421 *
10422 * DESCRIPTION: function to map a given sensitivity to the S noise
10423 * model parameters in the DNG noise model.
10424 *
10425 * PARAMETERS : sens : the sensor sensitivity
10426 *
10427 * RETURN : S (sensor amplification) noise
10428 *
10429 *==========================================================================*/
10430double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10431 double s = gCamCapability[mCameraId]->gradient_S * sens +
10432 gCamCapability[mCameraId]->offset_S;
10433 return ((s < 0.0) ? 0.0 : s);
10434}
10435
10436/*===========================================================================
10437 * FUNCTION : computeNoiseModelEntryO
10438 *
10439 * DESCRIPTION: function to map a given sensitivity to the O noise
10440 * model parameters in the DNG noise model.
10441 *
10442 * PARAMETERS : sens : the sensor sensitivity
10443 *
10444 * RETURN : O (sensor readout) noise
10445 *
10446 *==========================================================================*/
10447double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10448 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10449 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10450 1.0 : (1.0 * sens / max_analog_sens);
10451 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10452 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10453 return ((o < 0.0) ? 0.0 : o);
10454}
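/* Note (a sketch of how these are typically consumed): computeNoiseModelEntryS/O produce
 * the (S, O) pair of the DNG noise model, where the per-pixel noise variance is
 * approximately S * signal + O. The pairs are reported per CFA channel in
 * ANDROID_SENSOR_NOISE_PROFILE for the sensitivity of each capture result. */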
10455
10456/*===========================================================================
10457 * FUNCTION : getSensorSensitivity
10458 *
10459 * DESCRIPTION: convert iso_mode to an integer value
10460 *
10461 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10462 *
10463 * RETURN : sensitivity supported by sensor
10464 *
10465 *==========================================================================*/
10466int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10467{
10468 int32_t sensitivity;
10469
10470 switch (iso_mode) {
10471 case CAM_ISO_MODE_100:
10472 sensitivity = 100;
10473 break;
10474 case CAM_ISO_MODE_200:
10475 sensitivity = 200;
10476 break;
10477 case CAM_ISO_MODE_400:
10478 sensitivity = 400;
10479 break;
10480 case CAM_ISO_MODE_800:
10481 sensitivity = 800;
10482 break;
10483 case CAM_ISO_MODE_1600:
10484 sensitivity = 1600;
10485 break;
10486 default:
10487 sensitivity = -1;
10488 break;
10489 }
10490 return sensitivity;
10491}
10492
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010493int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10494 if (gHdrPlusClient != nullptr) {
10495 return OK;
10496 }
10497
10498 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10499 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010500 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10501 // to connect to Easel.
10502 bool doNotpowerOnEasel =
10503 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10504
10505 if (doNotpowerOnEasel) {
10506 gHdrPlusClient = nullptr;
10507 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10508 return OK;
10509 }
10510
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010511 // If Easel is present, power on Easel and suspend it immediately.
10512 status_t res = gHdrPlusClient->powerOnEasel();
10513 if (res != OK) {
10514 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10515 gHdrPlusClient = nullptr;
10516 return res;
10517 }
10518
10519 res = gHdrPlusClient->suspendEasel();
10520 if (res != OK) {
10521 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10522 }
10523
10524 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10525 } else {
10526 // Destroy HDR+ client if Easel isn't present.
10527 gHdrPlusClient = nullptr;
10528 }
10529
10530 return OK;
10531}
10532
Thierry Strudel3d639192016-09-09 11:52:26 -070010533/*===========================================================================
10534 * FUNCTION : getCamInfo
10535 *
10536 * DESCRIPTION: query camera capabilities
10537 *
10538 * PARAMETERS :
10539 * @cameraId : camera Id
10540 * @info : camera info struct to be filled in with camera capabilities
10541 *
10542 * RETURN : int type of status
10543 * NO_ERROR -- success
10544 * none-zero failure code
10545 *==========================================================================*/
10546int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10547 struct camera_info *info)
10548{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010549 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010550 int rc = 0;
10551
10552 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010553
10554 rc = initHdrPlusClientLocked();
10555 if (rc != OK) {
10556 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10557 pthread_mutex_unlock(&gCamLock);
10558 return rc;
10559 }
10560
Thierry Strudel3d639192016-09-09 11:52:26 -070010561 if (NULL == gCamCapability[cameraId]) {
10562 rc = initCapabilities(cameraId);
10563 if (rc < 0) {
10564 pthread_mutex_unlock(&gCamLock);
10565 return rc;
10566 }
10567 }
10568
10569 if (NULL == gStaticMetadata[cameraId]) {
10570 rc = initStaticMetadata(cameraId);
10571 if (rc < 0) {
10572 pthread_mutex_unlock(&gCamLock);
10573 return rc;
10574 }
10575 }
10576
10577 switch(gCamCapability[cameraId]->position) {
10578 case CAM_POSITION_BACK:
10579 case CAM_POSITION_BACK_AUX:
10580 info->facing = CAMERA_FACING_BACK;
10581 break;
10582
10583 case CAM_POSITION_FRONT:
10584 case CAM_POSITION_FRONT_AUX:
10585 info->facing = CAMERA_FACING_FRONT;
10586 break;
10587
10588 default:
10589 LOGE("Unknown position type %d for camera id:%d",
10590 gCamCapability[cameraId]->position, cameraId);
10591 rc = -1;
10592 break;
10593 }
10594
10595
10596 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010597#ifndef USE_HAL_3_3
10598 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10599#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010600 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010601#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010602 info->static_camera_characteristics = gStaticMetadata[cameraId];
10603
10604 //For now assume both cameras can operate independently.
10605 info->conflicting_devices = NULL;
10606 info->conflicting_devices_length = 0;
10607
10608 //resource cost is 100 * MIN(1.0, m/M),
10609 //where m is throughput requirement with maximum stream configuration
10610 //and M is CPP maximum throughput.
10611 float max_fps = 0.0;
10612 for (uint32_t i = 0;
10613 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10614 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10615 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10616 }
10617 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10618 gCamCapability[cameraId]->active_array_size.width *
10619 gCamCapability[cameraId]->active_array_size.height * max_fps /
10620 gCamCapability[cameraId]->max_pixel_bandwidth;
10621 info->resource_cost = 100 * MIN(1.0, ratio);
10622 LOGI("camera %d resource cost is %d", cameraId,
10623 info->resource_cost);
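    // Illustrative arithmetic only (hypothetical numbers, not taken from any real capability
    // table): with a 4000x3000 active array, max_fps of 30 and a CPP max_pixel_bandwidth of
    // 1.2e9 pixels/s, ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, so resource_cost = 90.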
10624
10625 pthread_mutex_unlock(&gCamLock);
10626 return rc;
10627}
10628
10629/*===========================================================================
10630 * FUNCTION : translateCapabilityToMetadata
10631 *
10632 * DESCRIPTION: translate the capability into camera_metadata_t
10633 *
10634 * PARAMETERS : type of the request
10635 *
10636 *
10637 * RETURN : success: camera_metadata_t*
10638 * failure: NULL
10639 *
10640 *==========================================================================*/
10641camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10642{
10643 if (mDefaultMetadata[type] != NULL) {
10644 return mDefaultMetadata[type];
10645 }
10646 //first time we are handling this request
10647 //fill up the metadata structure using the wrapper class
10648 CameraMetadata settings;
10649 //translate from cam_capability_t to camera_metadata_tag_t
10650 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10651 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10652 int32_t defaultRequestID = 0;
10653 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10654
10655 /* OIS disable */
10656 char ois_prop[PROPERTY_VALUE_MAX];
10657 memset(ois_prop, 0, sizeof(ois_prop));
10658 property_get("persist.camera.ois.disable", ois_prop, "0");
10659 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10660
10661 /* Force video to use OIS */
10662 char videoOisProp[PROPERTY_VALUE_MAX];
10663 memset(videoOisProp, 0, sizeof(videoOisProp));
10664 property_get("persist.camera.ois.video", videoOisProp, "1");
10665 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
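    // Illustrative only (assumed adb usage of the two OIS properties read above):
    //   adb shell setprop persist.camera.ois.disable 1   # force OIS off for all templates
    //   adb shell setprop persist.camera.ois.video 0     # stop forcing OIS on for video templates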
Shuzhen Wang19463d72016-03-08 11:09:52 -080010666
10667 // Hybrid AE enable/disable
10668 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10669 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10670 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10671 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10672
Thierry Strudel3d639192016-09-09 11:52:26 -070010673 uint8_t controlIntent = 0;
10674 uint8_t focusMode;
10675 uint8_t vsMode;
10676 uint8_t optStabMode;
10677 uint8_t cacMode;
10678 uint8_t edge_mode;
10679 uint8_t noise_red_mode;
10680 uint8_t tonemap_mode;
10681 bool highQualityModeEntryAvailable = FALSE;
10682 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010683 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010684 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10685 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010686 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010687 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010688
Thierry Strudel3d639192016-09-09 11:52:26 -070010689 switch (type) {
10690 case CAMERA3_TEMPLATE_PREVIEW:
10691 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10692 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10693 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10694 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10695 edge_mode = ANDROID_EDGE_MODE_FAST;
10696 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10697 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10698 break;
10699 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10700 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10701 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10702 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10703 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10704 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10705 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10706 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10707 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10708 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10709 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10710 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10711 highQualityModeEntryAvailable = TRUE;
10712 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10713 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10714 fastModeEntryAvailable = TRUE;
10715 }
10716 }
10717 if (highQualityModeEntryAvailable) {
10718 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10719 } else if (fastModeEntryAvailable) {
10720 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10721 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010722 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10723 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10724 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010725 break;
10726 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10727 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10728 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10729 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010730 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10731 edge_mode = ANDROID_EDGE_MODE_FAST;
10732 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10733 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10734 if (forceVideoOis)
10735 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10736 break;
10737 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10738 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10739 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10740 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010741 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10742 edge_mode = ANDROID_EDGE_MODE_FAST;
10743 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10744 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10745 if (forceVideoOis)
10746 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10747 break;
10748 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10749 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10750 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10751 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10752 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10753 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10754 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10755 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10756 break;
10757 case CAMERA3_TEMPLATE_MANUAL:
10758 edge_mode = ANDROID_EDGE_MODE_FAST;
10759 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10760 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10761 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10762 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10763 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10764 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10765 break;
10766 default:
10767 edge_mode = ANDROID_EDGE_MODE_FAST;
10768 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10769 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10770 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10771 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10772 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10773 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10774 break;
10775 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010776 // Set CAC to OFF if underlying device doesn't support
10777 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10778 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10779 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010780 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10781 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10782 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10783 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10784 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10785 }
10786 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010787 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010788 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010789
10790 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10791 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10792 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10793 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10794 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10795 || ois_disable)
10796 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10797 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010798 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010799
10800 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10801 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10802
10803 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10804 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10805
10806 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10807 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10808
10809 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10810 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10811
10812 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10813 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10814
10815 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10816 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10817
10818 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10819 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10820
10821 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10822 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10823
10824 /*flash*/
10825 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10826 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10827
10828 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10829 settings.update(ANDROID_FLASH_FIRING_POWER,
10830 &flashFiringLevel, 1);
10831
10832 /* lens */
10833 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10834 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10835
10836 if (gCamCapability[mCameraId]->filter_densities_count) {
10837 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10838 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10839 gCamCapability[mCameraId]->filter_densities_count);
10840 }
10841
10842 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10843 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10844
Thierry Strudel3d639192016-09-09 11:52:26 -070010845 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10846 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10847
10848 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10849 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10850
10851 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10852 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10853
10854 /* face detection (default to OFF) */
10855 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10856 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10857
Thierry Strudel54dc9782017-02-15 12:12:10 -080010858 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10859 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010860
10861 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10862 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10863
10864 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10865 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10866
Thierry Strudel3d639192016-09-09 11:52:26 -070010867
10868 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10869 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10870
10871    /* Exposure time (default to the minimum supported exposure time) */
10872 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10873 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10874
10875 /* frame duration */
10876 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10877 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10878
10879 /* sensitivity */
10880 static const int32_t default_sensitivity = 100;
10881 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010882#ifndef USE_HAL_3_3
10883 static const int32_t default_isp_sensitivity =
10884 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10885 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10886#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010887
10888 /*edge mode*/
10889 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10890
10891 /*noise reduction mode*/
10892 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10893
10894 /*color correction mode*/
10895 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10896 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10897
10898    /* tonemap mode */
10899 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10900
10901 int32_t scaler_crop_region[4];
10902 scaler_crop_region[0] = 0;
10903 scaler_crop_region[1] = 0;
10904 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10905 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10906 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
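    // Crop region layout is {left, top, width, height}; defaulting to the full active array
    // means no zoom is applied.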
10907
10908 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10909 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10910
10911 /*focus distance*/
10912 float focus_distance = 0.0;
10913 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10914
10915 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010916 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010917 float max_range = 0.0;
10918 float max_fixed_fps = 0.0;
10919 int32_t fps_range[2] = {0, 0};
10920 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10921 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010922 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10923 TEMPLATE_MAX_PREVIEW_FPS) {
10924 continue;
10925 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010926 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10927 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10928 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10929 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10930 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10931 if (range > max_range) {
10932 fps_range[0] =
10933 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10934 fps_range[1] =
10935 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10936 max_range = range;
10937 }
10938 } else {
10939 if (range < 0.01 && max_fixed_fps <
10940 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10941 fps_range[0] =
10942 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10943 fps_range[1] =
10944 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10945 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10946 }
10947 }
10948 }
10949 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
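    // Worked example with a hypothetical fps table {[15,30], [30,30], [60,60]} and
    // TEMPLATE_MAX_PREVIEW_FPS of 30: [60,60] is skipped, preview/still/ZSL templates pick the
    // widest remaining range [15,30], and video templates pick the highest fixed range [30,30].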
10950
10951 /*precapture trigger*/
10952 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10953 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10954
10955 /*af trigger*/
10956 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10957 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10958
10959 /* ae & af regions */
10960 int32_t active_region[] = {
10961 gCamCapability[mCameraId]->active_array_size.left,
10962 gCamCapability[mCameraId]->active_array_size.top,
10963 gCamCapability[mCameraId]->active_array_size.left +
10964 gCamCapability[mCameraId]->active_array_size.width,
10965 gCamCapability[mCameraId]->active_array_size.top +
10966 gCamCapability[mCameraId]->active_array_size.height,
10967 0};
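    // Region layout is {xmin, ymin, xmax, ymax, weight}; the trailing weight of 0 means the
    // region is ignored, so 3A falls back to its own metering/focus region selection.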
10968 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10969 sizeof(active_region) / sizeof(active_region[0]));
10970 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10971 sizeof(active_region) / sizeof(active_region[0]));
10972
10973 /* black level lock */
10974 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10975 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10976
Thierry Strudel3d639192016-09-09 11:52:26 -070010977 //special defaults for manual template
10978 if (type == CAMERA3_TEMPLATE_MANUAL) {
10979 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10980 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10981
10982 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10983 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10984
10985 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10986 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10987
10988 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10989 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10990
10991 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10992 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10993
10994 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10995 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10996 }
10997
10998
10999 /* TNR
11000      * This is where we decide for which templates TNR is enabled by default.
11001      * TNR is enabled if either the preview or the video stream requires TNR.
11002      * This is not to be confused with per-stream linking; that decision is
11003      * still made per session and is handled as part of stream configuration.
11004 */
11005 uint8_t tnr_enable = 0;
11006
11007 if (m_bTnrPreview || m_bTnrVideo) {
11008
11009 switch (type) {
11010 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11011 tnr_enable = 1;
11012 break;
11013
11014 default:
11015 tnr_enable = 0;
11016 break;
11017 }
11018
11019 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11020 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11021 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11022
11023 LOGD("TNR:%d with process plate %d for template:%d",
11024 tnr_enable, tnr_process_type, type);
11025 }
11026
11027 //Update Link tags to default
11028 int32_t sync_type = CAM_TYPE_STANDALONE;
11029 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11030
11031    int32_t is_main = 0; // value doesn't matter here; the app is expected to overwrite it
11032 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11033
11034 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11035
11036 /* CDS default */
11037 char prop[PROPERTY_VALUE_MAX];
11038 memset(prop, 0, sizeof(prop));
11039 property_get("persist.camera.CDS", prop, "Auto");
11040 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11041 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11042 if (CAM_CDS_MODE_MAX == cds_mode) {
11043 cds_mode = CAM_CDS_MODE_AUTO;
11044 }
11045
11046 /* Disabling CDS in templates which have TNR enabled*/
11047 if (tnr_enable)
11048 cds_mode = CAM_CDS_MODE_OFF;
11049
11050 int32_t mode = cds_mode;
11051 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011052
Thierry Strudel269c81a2016-10-12 12:13:59 -070011053 /* Manual Convergence AEC Speed is disabled by default*/
11054 float default_aec_speed = 0;
11055 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11056
11057 /* Manual Convergence AWB Speed is disabled by default*/
11058 float default_awb_speed = 0;
11059 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11060
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011061 // Set instant AEC to normal convergence by default
11062 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11063 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11064
Shuzhen Wang19463d72016-03-08 11:09:52 -080011065 /* hybrid ae */
11066 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11067
Thierry Strudel3d639192016-09-09 11:52:26 -070011068 mDefaultMetadata[type] = settings.release();
11069
11070 return mDefaultMetadata[type];
11071}
11072
11073/*===========================================================================
11074 * FUNCTION : setFrameParameters
11075 *
11076 * DESCRIPTION: set parameters per frame as requested in the metadata from
11077 * framework
11078 *
11079 * PARAMETERS :
11080 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011081 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011082 * @blob_request: Whether this request is a blob request or not
11083 *
11084 * RETURN : success: NO_ERROR
11085 * failure:
11086 *==========================================================================*/
11087int QCamera3HardwareInterface::setFrameParameters(
11088 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011089 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011090 int blob_request,
11091 uint32_t snapshotStreamId)
11092{
11093 /*translate from camera_metadata_t type to parm_type_t*/
11094 int rc = 0;
11095 int32_t hal_version = CAM_HAL_V3;
11096
11097 clear_metadata_buffer(mParameters);
11098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11099 LOGE("Failed to set hal version in the parameters");
11100 return BAD_VALUE;
11101 }
11102
11103 /*we need to update the frame number in the parameters*/
11104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11105 request->frame_number)) {
11106 LOGE("Failed to set the frame number in the parameters");
11107 return BAD_VALUE;
11108 }
11109
11110 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011112 LOGE("Failed to set stream type mask in the parameters");
11113 return BAD_VALUE;
11114 }
11115
11116 if (mUpdateDebugLevel) {
11117 uint32_t dummyDebugLevel = 0;
11118        /* The value of dummyDebugLevel is irrelevant. On
11119 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11121 dummyDebugLevel)) {
11122 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11123 return BAD_VALUE;
11124 }
11125 mUpdateDebugLevel = false;
11126 }
11127
11128 if(request->settings != NULL){
11129 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11130 if (blob_request)
11131 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11132 }
11133
11134 return rc;
11135}
11136
11137/*===========================================================================
11138 * FUNCTION : setReprocParameters
11139 *
11140 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11141 * return it.
11142 *
11143 * PARAMETERS :
11144 * @request : request that needs to be serviced
11145 *
11146 * RETURN : success: NO_ERROR
11147 * failure:
11148 *==========================================================================*/
11149int32_t QCamera3HardwareInterface::setReprocParameters(
11150 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11151 uint32_t snapshotStreamId)
11152{
11153 /*translate from camera_metadata_t type to parm_type_t*/
11154 int rc = 0;
11155
11156 if (NULL == request->settings){
11157 LOGE("Reprocess settings cannot be NULL");
11158 return BAD_VALUE;
11159 }
11160
11161 if (NULL == reprocParam) {
11162 LOGE("Invalid reprocessing metadata buffer");
11163 return BAD_VALUE;
11164 }
11165 clear_metadata_buffer(reprocParam);
11166
11167 /*we need to update the frame number in the parameters*/
11168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11169 request->frame_number)) {
11170 LOGE("Failed to set the frame number in the parameters");
11171 return BAD_VALUE;
11172 }
11173
11174 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11175 if (rc < 0) {
11176 LOGE("Failed to translate reproc request");
11177 return rc;
11178 }
11179
11180 CameraMetadata frame_settings;
11181 frame_settings = request->settings;
11182 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11183 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11184 int32_t *crop_count =
11185 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11186 int32_t *crop_data =
11187 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11188 int32_t *roi_map =
11189 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11190 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11191 cam_crop_data_t crop_meta;
11192 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11193 crop_meta.num_of_streams = 1;
11194 crop_meta.crop_info[0].crop.left = crop_data[0];
11195 crop_meta.crop_info[0].crop.top = crop_data[1];
11196 crop_meta.crop_info[0].crop.width = crop_data[2];
11197 crop_meta.crop_info[0].crop.height = crop_data[3];
11198
11199 crop_meta.crop_info[0].roi_map.left =
11200 roi_map[0];
11201 crop_meta.crop_info[0].roi_map.top =
11202 roi_map[1];
11203 crop_meta.crop_info[0].roi_map.width =
11204 roi_map[2];
11205 crop_meta.crop_info[0].roi_map.height =
11206 roi_map[3];
11207
11208 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11209 rc = BAD_VALUE;
11210 }
11211 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11212 request->input_buffer->stream,
11213 crop_meta.crop_info[0].crop.left,
11214 crop_meta.crop_info[0].crop.top,
11215 crop_meta.crop_info[0].crop.width,
11216 crop_meta.crop_info[0].crop.height);
11217 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11218 request->input_buffer->stream,
11219 crop_meta.crop_info[0].roi_map.left,
11220 crop_meta.crop_info[0].roi_map.top,
11221 crop_meta.crop_info[0].roi_map.width,
11222 crop_meta.crop_info[0].roi_map.height);
11223 } else {
11224 LOGE("Invalid reprocess crop count %d!", *crop_count);
11225 }
11226 } else {
11227 LOGE("No crop data from matching output stream");
11228 }
11229
11230 /* These settings are not needed for regular requests so handle them specially for
11231 reprocess requests; information needed for EXIF tags */
11232 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11233 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11234 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11235 if (NAME_NOT_FOUND != val) {
11236 uint32_t flashMode = (uint32_t)val;
11237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11238 rc = BAD_VALUE;
11239 }
11240 } else {
11241 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11242 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11243 }
11244 } else {
11245 LOGH("No flash mode in reprocess settings");
11246 }
11247
11248 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11249 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11251 rc = BAD_VALUE;
11252 }
11253 } else {
11254 LOGH("No flash state in reprocess settings");
11255 }
11256
11257 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11258 uint8_t *reprocessFlags =
11259 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11261 *reprocessFlags)) {
11262 rc = BAD_VALUE;
11263 }
11264 }
11265
Thierry Strudel54dc9782017-02-15 12:12:10 -080011266 // Add exif debug data to internal metadata
11267 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11268 mm_jpeg_debug_exif_params_t *debug_params =
11269 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11270 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11271 // AE
11272 if (debug_params->ae_debug_params_valid == TRUE) {
11273 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11274 debug_params->ae_debug_params);
11275 }
11276 // AWB
11277 if (debug_params->awb_debug_params_valid == TRUE) {
11278 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11279 debug_params->awb_debug_params);
11280 }
11281 // AF
11282 if (debug_params->af_debug_params_valid == TRUE) {
11283 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11284 debug_params->af_debug_params);
11285 }
11286 // ASD
11287 if (debug_params->asd_debug_params_valid == TRUE) {
11288 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11289 debug_params->asd_debug_params);
11290 }
11291 // Stats
11292 if (debug_params->stats_debug_params_valid == TRUE) {
11293 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11294 debug_params->stats_debug_params);
11295 }
11296 // BE Stats
11297 if (debug_params->bestats_debug_params_valid == TRUE) {
11298 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11299 debug_params->bestats_debug_params);
11300 }
11301 // BHIST
11302 if (debug_params->bhist_debug_params_valid == TRUE) {
11303 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11304 debug_params->bhist_debug_params);
11305 }
11306 // 3A Tuning
11307 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11308 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11309 debug_params->q3a_tuning_debug_params);
11310 }
11311 }
11312
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011313 // Add metadata which reprocess needs
11314 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11315 cam_reprocess_info_t *repro_info =
11316 (cam_reprocess_info_t *)frame_settings.find
11317 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011318 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011319 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011320 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011321 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011322 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011323 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011324 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011325 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011326 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011327 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011328 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011329 repro_info->pipeline_flip);
11330 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11331 repro_info->af_roi);
11332 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11333 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011334        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11335           CAM_INTF_PARM_ROTATION metadata has already been added in
11336           translateToHalMetadata and HAL needs to keep that new rotation
11337           metadata. Otherwise, the old rotation info saved in the vendor tag
11338           is used. */
11339 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11340 CAM_INTF_PARM_ROTATION, reprocParam) {
11341 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11342 } else {
11343 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011344 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011345 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011346 }
11347
11348    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11349       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11350       roi.width and roi.height become the final JPEG size.
11351       For now, HAL only checks this for reprocess requests. */
11352 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11353 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11354 uint8_t *enable =
11355 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11356 if (*enable == TRUE) {
11357 int32_t *crop_data =
11358 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11359 cam_stream_crop_info_t crop_meta;
11360 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11361 crop_meta.stream_id = 0;
11362 crop_meta.crop.left = crop_data[0];
11363 crop_meta.crop.top = crop_data[1];
11364 crop_meta.crop.width = crop_data[2];
11365 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011366 // The JPEG crop roi should match cpp output size
11367 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11368 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11369 crop_meta.roi_map.left = 0;
11370 crop_meta.roi_map.top = 0;
11371 crop_meta.roi_map.width = cpp_crop->crop.width;
11372 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011373 }
11374 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11375 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011376 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011377 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011378 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11379 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011380 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011381 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11382
11383 // Add JPEG scale information
11384 cam_dimension_t scale_dim;
11385 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11386 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11387 int32_t *roi =
11388 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11389 scale_dim.width = roi[2];
11390 scale_dim.height = roi[3];
11391 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11392 scale_dim);
11393 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11394 scale_dim.width, scale_dim.height, mCameraId);
11395 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011396 }
11397 }
11398
11399 return rc;
11400}
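/* Illustrative app-side sketch (assumed usage; only the vendor tags are taken from the code
 * above, while the buffer sizes and the reprocSettings name are hypothetical) showing how a
 * reprocess request could populate the JPEG encode crop tags handled in setReprocParameters():
 *
 *   CameraMetadata reprocSettings;
 *   uint8_t cropEnable = 1;
 *   int32_t cropRect[4] = {0, 0, 2000, 1500};   // left, top, width, height on the CPP output
 *   int32_t scaleRoi[4] = {0, 0, 1600, 1200};   // roi[2] x roi[3] becomes the final JPEG size
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, scaleRoi, 4);
 */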
11401
11402/*===========================================================================
11403 * FUNCTION : saveRequestSettings
11404 *
11405 * DESCRIPTION: Add any settings that might have changed to the request settings
11406 * and save the settings to be applied on the frame
11407 *
11408 * PARAMETERS :
11409 * @jpegMetadata : the extracted and/or modified jpeg metadata
11410 * @request : request with initial settings
11411 *
11412 * RETURN :
11413 * camera_metadata_t* : pointer to the saved request settings
11414 *==========================================================================*/
11415camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11416 const CameraMetadata &jpegMetadata,
11417 camera3_capture_request_t *request)
11418{
11419 camera_metadata_t *resultMetadata;
11420 CameraMetadata camMetadata;
11421 camMetadata = request->settings;
11422
11423 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11424 int32_t thumbnail_size[2];
11425 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11426 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11427 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11428 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11429 }
11430
11431 if (request->input_buffer != NULL) {
11432 uint8_t reprocessFlags = 1;
11433 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11434 (uint8_t*)&reprocessFlags,
11435 sizeof(reprocessFlags));
11436 }
11437
11438 resultMetadata = camMetadata.release();
11439 return resultMetadata;
11440}
11441
11442/*===========================================================================
11443 * FUNCTION : setHalFpsRange
11444 *
11445 * DESCRIPTION: set FPS range parameter
11446 *
11447 *
11448 * PARAMETERS :
11449 * @settings : Metadata from framework
11450 * @hal_metadata: Metadata buffer
11451 *
11452 *
11453 * RETURN : success: NO_ERROR
11454 * failure:
11455 *==========================================================================*/
11456int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11457 metadata_buffer_t *hal_metadata)
11458{
11459 int32_t rc = NO_ERROR;
11460 cam_fps_range_t fps_range;
11461 fps_range.min_fps = (float)
11462 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11463 fps_range.max_fps = (float)
11464 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11465 fps_range.video_min_fps = fps_range.min_fps;
11466 fps_range.video_max_fps = fps_range.max_fps;
11467
11468 LOGD("aeTargetFpsRange fps: [%f %f]",
11469 fps_range.min_fps, fps_range.max_fps);
11470 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11471 * follows:
11472 * ---------------------------------------------------------------|
11473 * Video stream is absent in configure_streams |
11474 * (Camcorder preview before the first video record |
11475 * ---------------------------------------------------------------|
11476 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11477 * | | | vid_min/max_fps|
11478 * ---------------------------------------------------------------|
11479 * NO | [ 30, 240] | 240 | [240, 240] |
11480 * |-------------|-------------|----------------|
11481 * | [240, 240] | 240 | [240, 240] |
11482 * ---------------------------------------------------------------|
11483 * Video stream is present in configure_streams |
11484 * ---------------------------------------------------------------|
11485 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11486 * | | | vid_min/max_fps|
11487 * ---------------------------------------------------------------|
11488 * NO | [ 30, 240] | 240 | [240, 240] |
11489 * (camcorder prev |-------------|-------------|----------------|
11490 * after video rec | [240, 240] | 240 | [240, 240] |
11491 * is stopped) | | | |
11492 * ---------------------------------------------------------------|
11493 * YES | [ 30, 240] | 240 | [240, 240] |
11494 * |-------------|-------------|----------------|
11495 * | [240, 240] | 240 | [240, 240] |
11496 * ---------------------------------------------------------------|
11497 * When Video stream is absent in configure_streams,
11498 * preview fps = sensor_fps / batchsize
11499 * Eg: for 240fps at batchSize 4, preview = 60fps
11500 * for 120fps at batchSize 4, preview = 30fps
11501 *
11502 * When video stream is present in configure_streams, preview fps is as per
11503 * the ratio of preview buffers to video buffers requested in process
11504 * capture request
11505 */
11506 mBatchSize = 0;
11507 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11508 fps_range.min_fps = fps_range.video_max_fps;
11509 fps_range.video_min_fps = fps_range.video_max_fps;
11510 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11511 fps_range.max_fps);
11512 if (NAME_NOT_FOUND != val) {
11513 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11515 return BAD_VALUE;
11516 }
11517
11518 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11519 /* If batchmode is currently in progress and the fps changes,
11520 * set the flag to restart the sensor */
11521 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11522 (mHFRVideoFps != fps_range.max_fps)) {
11523 mNeedSensorRestart = true;
11524 }
11525 mHFRVideoFps = fps_range.max_fps;
11526 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11527 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11528 mBatchSize = MAX_HFR_BATCH_SIZE;
11529 }
11530 }
11531 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11532
11533 }
11534 } else {
11535 /* HFR mode is session param in backend/ISP. This should be reset when
11536 * in non-HFR mode */
11537 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11539 return BAD_VALUE;
11540 }
11541 }
11542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11543 return BAD_VALUE;
11544 }
11545 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11546 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11547 return rc;
11548}
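/* Worked example for the constrained high-speed path above, assuming PREVIEW_FPS_FOR_HFR is 30
 * and MAX_HFR_BATCH_SIZE does not cap the result: an aeTargetFpsRange of [240, 240] maps to the
 * 240 fps HFR mode with sensor fps_range [240, 240] and mBatchSize = 240 / 30 = 8, so preview
 * effectively runs at 240 / 8 = 30 fps.
 */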
11549
11550/*===========================================================================
11551 * FUNCTION : translateToHalMetadata
11552 *
11553 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11554 *
11555 *
11556 * PARAMETERS :
11557 * @request : request sent from framework
11558 *
11559 *
11560 * RETURN : success: NO_ERROR
11561 * failure:
11562 *==========================================================================*/
11563int QCamera3HardwareInterface::translateToHalMetadata
11564 (const camera3_capture_request_t *request,
11565 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011566 uint32_t snapshotStreamId) {
11567 if (request == nullptr || hal_metadata == nullptr) {
11568 return BAD_VALUE;
11569 }
11570
11571 int64_t minFrameDuration = getMinFrameDuration(request);
11572
11573 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11574 minFrameDuration);
11575}
11576
11577int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11578 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11579 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11580
Thierry Strudel3d639192016-09-09 11:52:26 -070011581 int rc = 0;
11582 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011583 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011584
11585 /* Do not change the order of the following list unless you know what you are
11586 * doing.
11587 * The order is laid out in such a way that parameters in the front of the table
11588 * may be used to override the parameters later in the table. Examples are:
11589 * 1. META_MODE should precede AEC/AWB/AF MODE
11590      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11591 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11592      * 4. Any mode should precede its corresponding settings
11593 */
11594 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11595 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11597 rc = BAD_VALUE;
11598 }
11599 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11600 if (rc != NO_ERROR) {
11601 LOGE("extractSceneMode failed");
11602 }
11603 }
11604
11605 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11606 uint8_t fwk_aeMode =
11607 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11608 uint8_t aeMode;
11609 int32_t redeye;
11610
11611 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11612 aeMode = CAM_AE_MODE_OFF;
11613 } else {
11614 aeMode = CAM_AE_MODE_ON;
11615 }
11616 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11617 redeye = 1;
11618 } else {
11619 redeye = 0;
11620 }
11621
11622 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11623 fwk_aeMode);
11624 if (NAME_NOT_FOUND != val) {
11625 int32_t flashMode = (int32_t)val;
11626 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11627 }
11628
11629 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11631 rc = BAD_VALUE;
11632 }
11633 }
11634
11635 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11636 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11637 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11638 fwk_whiteLevel);
11639 if (NAME_NOT_FOUND != val) {
11640 uint8_t whiteLevel = (uint8_t)val;
11641 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11642 rc = BAD_VALUE;
11643 }
11644 }
11645 }
11646
11647 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11648 uint8_t fwk_cacMode =
11649 frame_settings.find(
11650 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11651 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11652 fwk_cacMode);
11653 if (NAME_NOT_FOUND != val) {
11654 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11655 bool entryAvailable = FALSE;
11656 // Check whether Frameworks set CAC mode is supported in device or not
11657 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11658 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11659 entryAvailable = TRUE;
11660 break;
11661 }
11662 }
11663 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11664 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11665 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11666 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11667 if (entryAvailable == FALSE) {
11668 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11669 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11670 } else {
11671 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11672                    // HIGH_QUALITY is not supported, so set FAST since the spec says the
11673                    // underlying device implementation can be the same for both modes.
11674 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11675 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11676 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11677 // in order to avoid the fps drop due to high quality
11678 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11679 } else {
11680 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11681 }
11682 }
11683 }
11684 LOGD("Final cacMode is %d", cacMode);
11685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11686 rc = BAD_VALUE;
11687 }
11688 } else {
11689 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11690 }
11691 }
11692
Thierry Strudel2896d122017-02-23 19:18:03 -080011693 char af_value[PROPERTY_VALUE_MAX];
11694 property_get("persist.camera.af.infinity", af_value, "0");
11695
Jason Lee84ae9972017-02-24 13:24:24 -080011696 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011697 if (atoi(af_value) == 0) {
11698 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011699 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011700 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11701 fwk_focusMode);
11702 if (NAME_NOT_FOUND != val) {
11703 uint8_t focusMode = (uint8_t)val;
11704 LOGD("set focus mode %d", focusMode);
11705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11706 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11707 rc = BAD_VALUE;
11708 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011709 }
11710 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011711 } else {
11712 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11713 LOGE("Focus forced to infinity %d", focusMode);
11714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11715 rc = BAD_VALUE;
11716 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011717 }
11718
Jason Lee84ae9972017-02-24 13:24:24 -080011719 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11720 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011721 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11723 focalDistance)) {
11724 rc = BAD_VALUE;
11725 }
11726 }
11727
11728 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11729 uint8_t fwk_antibandingMode =
11730 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11731 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11732 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11733 if (NAME_NOT_FOUND != val) {
11734 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011735 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11736 if (m60HzZone) {
11737 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11738 } else {
11739 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11740 }
11741 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011742 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11743 hal_antibandingMode)) {
11744 rc = BAD_VALUE;
11745 }
11746 }
11747 }
11748
11749 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11750 int32_t expCompensation = frame_settings.find(
11751 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11752 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11753 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11754 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11755 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011756 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11758 expCompensation)) {
11759 rc = BAD_VALUE;
11760 }
11761 }
11762
11763 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11764 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11766 rc = BAD_VALUE;
11767 }
11768 }
11769 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11770 rc = setHalFpsRange(frame_settings, hal_metadata);
11771 if (rc != NO_ERROR) {
11772 LOGE("setHalFpsRange failed");
11773 }
11774 }
11775
11776 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11777 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11779 rc = BAD_VALUE;
11780 }
11781 }
11782
11783 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11784 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11785 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11786 fwk_effectMode);
11787 if (NAME_NOT_FOUND != val) {
11788 uint8_t effectMode = (uint8_t)val;
11789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11790 rc = BAD_VALUE;
11791 }
11792 }
11793 }
11794
11795 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11796 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11798 colorCorrectMode)) {
11799 rc = BAD_VALUE;
11800 }
11801 }
11802
11803 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11804 cam_color_correct_gains_t colorCorrectGains;
11805 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11806 colorCorrectGains.gains[i] =
11807 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11808 }
11809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11810 colorCorrectGains)) {
11811 rc = BAD_VALUE;
11812 }
11813 }
11814
11815 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11816 cam_color_correct_matrix_t colorCorrectTransform;
11817 cam_rational_type_t transform_elem;
11818 size_t num = 0;
11819 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11820 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11821 transform_elem.numerator =
11822 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11823 transform_elem.denominator =
11824 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11825 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11826 num++;
11827 }
11828 }
11829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11830 colorCorrectTransform)) {
11831 rc = BAD_VALUE;
11832 }
11833 }
11834
11835 cam_trigger_t aecTrigger;
11836 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11837 aecTrigger.trigger_id = -1;
11838 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11839 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11840 aecTrigger.trigger =
11841 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11842 aecTrigger.trigger_id =
11843 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11844 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11845 aecTrigger)) {
11846 rc = BAD_VALUE;
11847 }
11848 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11849 aecTrigger.trigger, aecTrigger.trigger_id);
11850 }
11851
11852 /*af_trigger must come with a trigger id*/
11853 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11854 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11855 cam_trigger_t af_trigger;
11856 af_trigger.trigger =
11857 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11858 af_trigger.trigger_id =
11859 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11861 rc = BAD_VALUE;
11862 }
11863 LOGD("AfTrigger: %d AfTriggerID: %d",
11864 af_trigger.trigger, af_trigger.trigger_id);
11865 }
11866
11867 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11868 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11870 rc = BAD_VALUE;
11871 }
11872 }
11873 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11874 cam_edge_application_t edge_application;
11875 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011876
Thierry Strudel3d639192016-09-09 11:52:26 -070011877 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11878 edge_application.sharpness = 0;
11879 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011880 edge_application.sharpness =
11881 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11882 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11883 int32_t sharpness =
11884 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11885 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11886 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11887 LOGD("Setting edge mode sharpness %d", sharpness);
11888 edge_application.sharpness = sharpness;
11889 }
11890 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011891 }
11892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11893 rc = BAD_VALUE;
11894 }
11895 }
11896
11897 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11898 int32_t respectFlashMode = 1;
11899 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11900 uint8_t fwk_aeMode =
11901 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11902 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11903 respectFlashMode = 0;
11904 LOGH("AE Mode controls flash, ignore android.flash.mode");
11905 }
11906 }
11907 if (respectFlashMode) {
11908 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11909 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11910 LOGH("flash mode after mapping %d", val);
11911 // To check: CAM_INTF_META_FLASH_MODE usage
11912 if (NAME_NOT_FOUND != val) {
11913 uint8_t flashMode = (uint8_t)val;
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11915 rc = BAD_VALUE;
11916 }
11917 }
11918 }
11919 }
11920
11921 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11922 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11924 rc = BAD_VALUE;
11925 }
11926 }
11927
11928 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11929 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11931 flashFiringTime)) {
11932 rc = BAD_VALUE;
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11937 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11939 hotPixelMode)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11945 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11947 lensAperture)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11953 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11955 filterDensity)) {
11956 rc = BAD_VALUE;
11957 }
11958 }
11959
11960 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11961 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11963 focalLength)) {
11964 rc = BAD_VALUE;
11965 }
11966 }
11967
11968 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11969 uint8_t optStabMode =
11970 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11972 optStabMode)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11978 uint8_t videoStabMode =
11979 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11980 LOGD("videoStabMode from APP = %d", videoStabMode);
11981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11982 videoStabMode)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986
11987
11988 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11989 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11991 noiseRedMode)) {
11992 rc = BAD_VALUE;
11993 }
11994 }
11995
11996 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11997 float reprocessEffectiveExposureFactor =
11998 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12000 reprocessEffectiveExposureFactor)) {
12001 rc = BAD_VALUE;
12002 }
12003 }
12004
12005 cam_crop_region_t scalerCropRegion;
12006 bool scalerCropSet = false;
12007 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12008 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12009 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12010 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12011 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12012
12013 // Map coordinate system from active array to sensor output.
12014 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12015 scalerCropRegion.width, scalerCropRegion.height);
12016
12017 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12018 scalerCropRegion)) {
12019 rc = BAD_VALUE;
12020 }
12021 scalerCropSet = true;
12022 }
12023
12024 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12025 int64_t sensorExpTime =
12026 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12027 LOGD("setting sensorExpTime %lld", sensorExpTime);
12028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12029 sensorExpTime)) {
12030 rc = BAD_VALUE;
12031 }
12032 }
12033
12034 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12035 int64_t sensorFrameDuration =
12036 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012037 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12038 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12039 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12040 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12042 sensorFrameDuration)) {
12043 rc = BAD_VALUE;
12044 }
12045 }
12046
12047 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12048 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12049 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12050 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12051 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12052 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12053 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12055 sensorSensitivity)) {
12056 rc = BAD_VALUE;
12057 }
12058 }
12059
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012060#ifndef USE_HAL_3_3
12061 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12062 int32_t ispSensitivity =
12063 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12064 if (ispSensitivity <
12065 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12066 ispSensitivity =
12067 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12068 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12069 }
12070 if (ispSensitivity >
12071 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12072 ispSensitivity =
12073 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12074 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12075 }
12076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12077 ispSensitivity)) {
12078 rc = BAD_VALUE;
12079 }
12080 }
12081#endif
12082
Thierry Strudel3d639192016-09-09 11:52:26 -070012083 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12084 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12086 rc = BAD_VALUE;
12087 }
12088 }
12089
12090 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12091 uint8_t fwk_facedetectMode =
12092 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12093
12094 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12095 fwk_facedetectMode);
12096
12097 if (NAME_NOT_FOUND != val) {
12098 uint8_t facedetectMode = (uint8_t)val;
12099 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12100 facedetectMode)) {
12101 rc = BAD_VALUE;
12102 }
12103 }
12104 }
12105
Thierry Strudel54dc9782017-02-15 12:12:10 -080012106 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012107 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012108 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12110 histogramMode)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114
12115 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12116 uint8_t sharpnessMapMode =
12117 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12119 sharpnessMapMode)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123
12124 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12125 uint8_t tonemapMode =
12126 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12128 rc = BAD_VALUE;
12129 }
12130 }
12131 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12132 /*All tonemap channels will have the same number of points*/
12133 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12134 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12135 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12136 cam_rgb_tonemap_curves tonemapCurves;
12137 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12138 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12139 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12140 tonemapCurves.tonemap_points_cnt,
12141 CAM_MAX_TONEMAP_CURVE_SIZE);
12142 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12143 }
12144
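    /* Each tonemap curve point is an (in, out) float pair, so each framework array holds 2 * tonemap_points_cnt values; copy the pairs per channel below */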
12145 /* ch0 = G*/
12146 size_t point = 0;
12147 cam_tonemap_curve_t tonemapCurveGreen;
12148 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12149 for (size_t j = 0; j < 2; j++) {
12150 tonemapCurveGreen.tonemap_points[i][j] =
12151 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12152 point++;
12153 }
12154 }
12155 tonemapCurves.curves[0] = tonemapCurveGreen;
12156
12157 /* ch 1 = B */
12158 point = 0;
12159 cam_tonemap_curve_t tonemapCurveBlue;
12160 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12161 for (size_t j = 0; j < 2; j++) {
12162 tonemapCurveBlue.tonemap_points[i][j] =
12163 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12164 point++;
12165 }
12166 }
12167 tonemapCurves.curves[1] = tonemapCurveBlue;
12168
12169 /* ch 2 = R */
12170 point = 0;
12171 cam_tonemap_curve_t tonemapCurveRed;
12172 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12173 for (size_t j = 0; j < 2; j++) {
12174 tonemapCurveRed.tonemap_points[i][j] =
12175 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12176 point++;
12177 }
12178 }
12179 tonemapCurves.curves[2] = tonemapCurveRed;
12180
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12182 tonemapCurves)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12188 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12190 captureIntent)) {
12191 rc = BAD_VALUE;
12192 }
12193 }
12194
12195 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12196 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12198 blackLevelLock)) {
12199 rc = BAD_VALUE;
12200 }
12201 }
12202
12203 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12204 uint8_t lensShadingMapMode =
12205 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12207 lensShadingMapMode)) {
12208 rc = BAD_VALUE;
12209 }
12210 }
12211
12212 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12213 cam_area_t roi;
12214 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012215 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012216
12217 // Map coordinate system from active array to sensor output.
12218 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12219 roi.rect.height);
12220
12221 if (scalerCropSet) {
12222 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12223 }
12224 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12225 rc = BAD_VALUE;
12226 }
12227 }
12228
12229 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12230 cam_area_t roi;
12231 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012232 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012233
12234 // Map coordinate system from active array to sensor output.
12235 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12236 roi.rect.height);
12237
12238 if (scalerCropSet) {
12239 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12240 }
12241 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12242 rc = BAD_VALUE;
12243 }
12244 }
12245
12246 // CDS for non-HFR non-video mode
12247 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12248 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12249 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12250 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12251 LOGE("Invalid CDS mode %d!", *fwk_cds);
12252 } else {
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12254 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12255 rc = BAD_VALUE;
12256 }
12257 }
12258 }
12259
Thierry Strudel04e026f2016-10-10 11:27:36 -070012260 // Video HDR
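    // The vendor tag value is overridden to ON when video HDR was already enabled at configure time (m_bVideoHdrEnabled); state transitions are logged for profiling.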
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012261 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012262 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012263 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12264 }
12265 if (m_bVideoHdrEnabled)
12266 vhdr = CAM_VIDEO_HDR_MODE_ON;
12267
Thierry Strudel54dc9782017-02-15 12:12:10 -080012268 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12269
12270 if(vhdr != curr_hdr_state)
12271 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12272
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012273 rc = setVideoHdrMode(mParameters, vhdr);
12274 if (rc != NO_ERROR) {
12275 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012276 }
12277
12278 //IR
12279 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12280 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12281 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012282 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12283 uint8_t isIRon = 0;
12284
12285 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012286 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12287 LOGE("Invalid IR mode %d!", fwk_ir);
12288 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012289 if(isIRon != curr_ir_state )
12290 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12291
Thierry Strudel04e026f2016-10-10 11:27:36 -070012292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12293 CAM_INTF_META_IR_MODE, fwk_ir)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297 }
12298
Thierry Strudel54dc9782017-02-15 12:12:10 -080012299 //Binning Correction Mode
12300 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12301 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12302 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12303 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12304 || (0 > fwk_binning_correction)) {
12305 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12306 } else {
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12308 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312 }
12313
Thierry Strudel269c81a2016-10-12 12:13:59 -070012314 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12315 float aec_speed;
12316 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12317 LOGD("AEC Speed :%f", aec_speed);
12318 if ( aec_speed < 0 ) {
12319 LOGE("Invalid AEC mode %f!", aec_speed);
12320 } else {
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12322 aec_speed)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326 }
12327
12328 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12329 float awb_speed;
12330 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12331 LOGD("AWB Speed :%f", awb_speed);
12332 if ( awb_speed < 0 ) {
12333 LOGE("Invalid AWB mode %f!", awb_speed);
12334 } else {
12335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12336 awb_speed)) {
12337 rc = BAD_VALUE;
12338 }
12339 }
12340 }
12341
Thierry Strudel3d639192016-09-09 11:52:26 -070012342 // TNR
12343 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12344 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12345 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012346 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012347 cam_denoise_param_t tnr;
12348 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12349 tnr.process_plates =
12350 (cam_denoise_process_type_t)frame_settings.find(
12351 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12352 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012353
12354 if(b_TnrRequested != curr_tnr_state)
12355 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12356
Thierry Strudel3d639192016-09-09 11:52:26 -070012357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
Thierry Strudel54dc9782017-02-15 12:12:10 -080012362 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012363 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012364 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12366 *exposure_metering_mode)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370
Thierry Strudel3d639192016-09-09 11:52:26 -070012371 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12372 int32_t fwk_testPatternMode =
12373 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12374 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12375 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12376
12377 if (NAME_NOT_FOUND != testPatternMode) {
12378 cam_test_pattern_data_t testPatternData;
12379 memset(&testPatternData, 0, sizeof(testPatternData));
12380 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12381 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12382 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12383 int32_t *fwk_testPatternData =
12384 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12385 testPatternData.r = fwk_testPatternData[0];
12386 testPatternData.b = fwk_testPatternData[3];
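    // The framework supplies the solid-color pattern as [R, Geven, Godd, B]; map the two green samples to Gr/Gb according to the sensor's CFA arrangement below.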
12387 switch (gCamCapability[mCameraId]->color_arrangement) {
12388 case CAM_FILTER_ARRANGEMENT_RGGB:
12389 case CAM_FILTER_ARRANGEMENT_GRBG:
12390 testPatternData.gr = fwk_testPatternData[1];
12391 testPatternData.gb = fwk_testPatternData[2];
12392 break;
12393 case CAM_FILTER_ARRANGEMENT_GBRG:
12394 case CAM_FILTER_ARRANGEMENT_BGGR:
12395 testPatternData.gr = fwk_testPatternData[2];
12396 testPatternData.gb = fwk_testPatternData[1];
12397 break;
12398 default:
12399 LOGE("color arrangement %d is not supported",
12400 gCamCapability[mCameraId]->color_arrangement);
12401 break;
12402 }
12403 }
12404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12405 testPatternData)) {
12406 rc = BAD_VALUE;
12407 }
12408 } else {
12409 LOGE("Invalid framework sensor test pattern mode %d",
12410 fwk_testPatternMode);
12411 }
12412 }
12413
12414 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12415 size_t count = 0;
12416 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12417 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12418 gps_coords.data.d, gps_coords.count, count);
12419 if (gps_coords.count != count) {
12420 rc = BAD_VALUE;
12421 }
12422 }
12423
12424 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12425 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12426 size_t count = 0;
12427 const char *gps_methods_src = (const char *)
12428 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12429 memset(gps_methods, '\0', sizeof(gps_methods));
12430 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12431 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12432 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12433 if (GPS_PROCESSING_METHOD_SIZE != count) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437
12438 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12439 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12441 gps_timestamp)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445
12446 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12447 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12448 cam_rotation_info_t rotation_info;
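    // ANDROID_JPEG_ORIENTATION is a clockwise angle restricted by the framework to 0, 90, 180 or 270 degrees, so only these four mappings are needed.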
12449 if (orientation == 0) {
12450 rotation_info.rotation = ROTATE_0;
12451 } else if (orientation == 90) {
12452 rotation_info.rotation = ROTATE_90;
12453 } else if (orientation == 180) {
12454 rotation_info.rotation = ROTATE_180;
12455 } else if (orientation == 270) {
12456 rotation_info.rotation = ROTATE_270;
12457 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012458 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012459 rotation_info.streamId = snapshotStreamId;
12460 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12462 rc = BAD_VALUE;
12463 }
12464 }
12465
12466 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12467 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12469 rc = BAD_VALUE;
12470 }
12471 }
12472
12473 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12474 uint32_t thumb_quality = (uint32_t)
12475 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12476 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12477 thumb_quality)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481
12482 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12483 cam_dimension_t dim;
12484 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12485 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12487 rc = BAD_VALUE;
12488 }
12489 }
12490
12491 // Internal metadata
12492 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12493 size_t count = 0;
12494 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12495 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12496 privatedata.data.i32, privatedata.count, count);
12497 if (privatedata.count != count) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012502 // ISO/Exposure Priority
12503 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12504 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12505 cam_priority_mode_t mode =
12506 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
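        // QCAMERA3_USE_ISO_EXP_PRIORITY carries a single 64-bit value: an ISO setting under ISO priority, an exposure time under exposure priority. ZSL is enabled alongside either priority mode.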
12507 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12508 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12509 use_iso_exp_pty.previewOnly = FALSE;
12510 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12511 use_iso_exp_pty.value = *ptr;
12512
12513 if(CAM_ISO_PRIORITY == mode) {
12514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12515 use_iso_exp_pty)) {
12516 rc = BAD_VALUE;
12517 }
12518 }
12519 else {
12520 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12521 use_iso_exp_pty)) {
12522 rc = BAD_VALUE;
12523 }
12524 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012525
12526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12527 rc = BAD_VALUE;
12528 }
12529 }
12530 } else {
12531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12532 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012533 }
12534 }
12535
12536 // Saturation
12537 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12538 int32_t* use_saturation =
12539 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
Thierry Strudel3d639192016-09-09 11:52:26 -070012545 // EV step
12546 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12547 gCamCapability[mCameraId]->exp_compensation_step)) {
12548 rc = BAD_VALUE;
12549 }
12550
12551 // CDS info
12552 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12553 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12554 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12555
12556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12557 CAM_INTF_META_CDS_DATA, *cdsData)) {
12558 rc = BAD_VALUE;
12559 }
12560 }
12561
Shuzhen Wang19463d72016-03-08 11:09:52 -080012562 // Hybrid AE
12563 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12564 uint8_t *hybrid_ae = (uint8_t *)
12565 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12566
12567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12568 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12569 rc = BAD_VALUE;
12570 }
12571 }
12572
Shuzhen Wang14415f52016-11-16 18:26:18 -080012573 // Histogram
12574 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12575 uint8_t histogramMode =
12576 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12578 histogramMode)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582
12583 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12584 int32_t histogramBins =
12585 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12587 histogramBins)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012592 // Tracking AF
12593 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12594 uint8_t trackingAfTrigger =
12595 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12597 trackingAfTrigger)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
Thierry Strudel3d639192016-09-09 11:52:26 -070012602 return rc;
12603}
12604
12605/*===========================================================================
12606 * FUNCTION : captureResultCb
12607 *
12608 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12609 *
12610 * PARAMETERS :
12611 * @frame : frame information from mm-camera-interface
12612 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12613 * @userdata: userdata
12614 *
12615 * RETURN : NONE
12616 *==========================================================================*/
12617void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12618 camera3_stream_buffer_t *buffer,
12619 uint32_t frame_number, bool isInputBuffer, void *userdata)
12620{
12621 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12622 if (hw == NULL) {
12623 LOGE("Invalid hw %p", hw);
12624 return;
12625 }
12626
12627 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12628 return;
12629}
12630
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012631/*===========================================================================
12632 * FUNCTION : setBufferErrorStatus
12633 *
12634 * DESCRIPTION: Callback handler for channels to report any buffer errors
12635 *
12636 * PARAMETERS :
12637 * @ch : Channel on which buffer error is reported from
12638 * @frame_number : frame number on which buffer error is reported on
12639 * @buffer_status : buffer error status
12640 * @userdata: userdata
12641 *
12642 * RETURN : NONE
12643 *==========================================================================*/
12644void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12645 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12646{
12647 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12648 if (hw == NULL) {
12649 LOGE("Invalid hw %p", hw);
12650 return;
12651 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012652
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012653 hw->setBufferErrorStatus(ch, frame_number, err);
12654 return;
12655}
12656
12657void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12658 uint32_t frameNumber, camera3_buffer_status_t err)
12659{
12660 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12661 pthread_mutex_lock(&mMutex);
12662
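    // Flag every pending buffer of this frame that belongs to the reporting channel as being in error.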
12663 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12664 if (req.frame_number != frameNumber)
12665 continue;
12666 for (auto& k : req.mPendingBufferList) {
12667 if(k.stream->priv == ch) {
12668 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12669 }
12670 }
12671 }
12672
12673 pthread_mutex_unlock(&mMutex);
12674 return;
12675}
Thierry Strudel3d639192016-09-09 11:52:26 -070012676/*===========================================================================
12677 * FUNCTION : initialize
12678 *
12679 * DESCRIPTION: Pass framework callback pointers to HAL
12680 *
12681 * PARAMETERS :
12682 *
12683 *
12684 * RETURN : Success : 0
12685 * Failure: -ENODEV
12686 *==========================================================================*/
12687
12688int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12689 const camera3_callback_ops_t *callback_ops)
12690{
12691 LOGD("E");
12692 QCamera3HardwareInterface *hw =
12693 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12694 if (!hw) {
12695 LOGE("NULL camera device");
12696 return -ENODEV;
12697 }
12698
12699 int rc = hw->initialize(callback_ops);
12700 LOGD("X");
12701 return rc;
12702}
12703
12704/*===========================================================================
12705 * FUNCTION : configure_streams
12706 *
12707 * DESCRIPTION: Forward the framework's stream configuration to configureStreams() on the HAL instance
12708 *
12709 * PARAMETERS :
12710 *
12711 *
12712 * RETURN : Success: 0
12713 * Failure: -EINVAL (if stream configuration is invalid)
12714 * -ENODEV (fatal error)
12715 *==========================================================================*/
12716
12717int QCamera3HardwareInterface::configure_streams(
12718 const struct camera3_device *device,
12719 camera3_stream_configuration_t *stream_list)
12720{
12721 LOGD("E");
12722 QCamera3HardwareInterface *hw =
12723 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12724 if (!hw) {
12725 LOGE("NULL camera device");
12726 return -ENODEV;
12727 }
12728 int rc = hw->configureStreams(stream_list);
12729 LOGD("X");
12730 return rc;
12731}
12732
12733/*===========================================================================
12734 * FUNCTION : construct_default_request_settings
12735 *
12736 * DESCRIPTION: Configure a settings buffer to meet the required use case
12737 *
12738 * PARAMETERS :
12739 *
12740 *
12741 * RETURN : Success: Return valid metadata
12742 * Failure: Return NULL
12743 *==========================================================================*/
12744const camera_metadata_t* QCamera3HardwareInterface::
12745 construct_default_request_settings(const struct camera3_device *device,
12746 int type)
12747{
12748
12749 LOGD("E");
12750 camera_metadata_t* fwk_metadata = NULL;
12751 QCamera3HardwareInterface *hw =
12752 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12753 if (!hw) {
12754 LOGE("NULL camera device");
12755 return NULL;
12756 }
12757
12758 fwk_metadata = hw->translateCapabilityToMetadata(type);
12759
12760 LOGD("X");
12761 return fwk_metadata;
12762}
12763
12764/*===========================================================================
12765 * FUNCTION : process_capture_request
12766 *
12767 * DESCRIPTION: Entry point for a framework capture request; routed through orchestrateRequest()
12768 *
12769 * PARAMETERS :
12770 *
12771 *
12772 * RETURN :
12773 *==========================================================================*/
12774int QCamera3HardwareInterface::process_capture_request(
12775 const struct camera3_device *device,
12776 camera3_capture_request_t *request)
12777{
12778 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012779 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012780 QCamera3HardwareInterface *hw =
12781 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12782 if (!hw) {
12783 LOGE("NULL camera device");
12784 return -EINVAL;
12785 }
12786
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012787 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012788 LOGD("X");
12789 return rc;
12790}
12791
12792/*===========================================================================
12793 * FUNCTION : dump
12794 *
12795 * DESCRIPTION: Dump HAL state to the supplied file descriptor
12796 *
12797 * PARAMETERS :
12798 *
12799 *
12800 * RETURN :
12801 *==========================================================================*/
12802
12803void QCamera3HardwareInterface::dump(
12804 const struct camera3_device *device, int fd)
12805{
12806 /* Log level property is read when "adb shell dumpsys media.camera" is
12807 called so that the log level can be controlled without restarting
12808 the media server */
12809 getLogLevel();
12810
12811 LOGD("E");
12812 QCamera3HardwareInterface *hw =
12813 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12814 if (!hw) {
12815 LOGE("NULL camera device");
12816 return;
12817 }
12818
12819 hw->dump(fd);
12820 LOGD("X");
12821 return;
12822}
12823
12824/*===========================================================================
12825 * FUNCTION : flush
12826 *
12827 * DESCRIPTION: Flush all in-flight requests; only honored while the HAL is in STARTED state
12828 *
12829 * PARAMETERS :
12830 *
12831 *
12832 * RETURN :
12833 *==========================================================================*/
12834
12835int QCamera3HardwareInterface::flush(
12836 const struct camera3_device *device)
12837{
12838 int rc;
12839 LOGD("E");
12840 QCamera3HardwareInterface *hw =
12841 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12842 if (!hw) {
12843 LOGE("NULL camera device");
12844 return -EINVAL;
12845 }
12846
12847 pthread_mutex_lock(&hw->mMutex);
12848 // Validate current state
12849 switch (hw->mState) {
12850 case STARTED:
12851 /* valid state */
12852 break;
12853
12854 case ERROR:
12855 pthread_mutex_unlock(&hw->mMutex);
12856 hw->handleCameraDeviceError();
12857 return -ENODEV;
12858
12859 default:
12860 LOGI("Flush returned during state %d", hw->mState);
12861 pthread_mutex_unlock(&hw->mMutex);
12862 return 0;
12863 }
12864 pthread_mutex_unlock(&hw->mMutex);
12865
12866 rc = hw->flush(true /* restart channels */ );
12867 LOGD("X");
12868 return rc;
12869}
12870
12871/*===========================================================================
12872 * FUNCTION : close_camera_device
12873 *
12874 * DESCRIPTION: Close the camera device and destroy the HAL instance
12875 *
12876 * PARAMETERS :
12877 *
12878 *
12879 * RETURN :
12880 *==========================================================================*/
12881int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12882{
12883 int ret = NO_ERROR;
12884 QCamera3HardwareInterface *hw =
12885 reinterpret_cast<QCamera3HardwareInterface *>(
12886 reinterpret_cast<camera3_device_t *>(device)->priv);
12887 if (!hw) {
12888 LOGE("NULL camera device");
12889 return BAD_VALUE;
12890 }
12891
12892 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12893 delete hw;
12894 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012895 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012896 return ret;
12897}
12898
12899/*===========================================================================
12900 * FUNCTION : getWaveletDenoiseProcessPlate
12901 *
12902 * DESCRIPTION: query wavelet denoise process plate
12903 *
12904 * PARAMETERS : None
12905 *
12906 * RETURN : WNR process plate value
12907 *==========================================================================*/
12908cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12909{
12910 char prop[PROPERTY_VALUE_MAX];
12911 memset(prop, 0, sizeof(prop));
12912 property_get("persist.denoise.process.plates", prop, "0");
12913 int processPlate = atoi(prop);
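    // persist.denoise.process.plates: 0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr, 3 = streamlined CbCr; any other value falls back to streamlined YCbCr.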
12914 switch(processPlate) {
12915 case 0:
12916 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12917 case 1:
12918 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12919 case 2:
12920 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12921 case 3:
12922 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12923 default:
12924 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12925 }
12926}
12927
12928
12929/*===========================================================================
12930 * FUNCTION : getTemporalDenoiseProcessPlate
12931 *
12932 * DESCRIPTION: query temporal denoise process plate
12933 *
12934 * PARAMETERS : None
12935 *
12936 * RETURN : TNR process plate value
12937 *==========================================================================*/
12938cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12939{
12940 char prop[PROPERTY_VALUE_MAX];
12941 memset(prop, 0, sizeof(prop));
12942 property_get("persist.tnr.process.plates", prop, "0");
12943 int processPlate = atoi(prop);
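    // persist.tnr.process.plates uses the same 0-3 mapping as the wavelet denoise property above, with streamlined YCbCr as the fallback.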
12944 switch(processPlate) {
12945 case 0:
12946 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12947 case 1:
12948 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12949 case 2:
12950 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12951 case 3:
12952 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12953 default:
12954 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12955 }
12956}
12957
12958
12959/*===========================================================================
12960 * FUNCTION : extractSceneMode
12961 *
12962 * DESCRIPTION: Extract scene mode from frameworks set metadata
12963 *
12964 * PARAMETERS :
12965 * @frame_settings: CameraMetadata reference
12966 * @metaMode: ANDROID_CONTROL_MODE value
12967 * @hal_metadata: hal metadata structure
12968 *
12969 * RETURN : int32_t type of status
12970 *==========================================================================*/
12971int32_t QCamera3HardwareInterface::extractSceneMode(
12972 const CameraMetadata &frame_settings, uint8_t metaMode,
12973 metadata_buffer_t *hal_metadata)
12974{
12975 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012976 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12977
12978 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12979 LOGD("Ignoring control mode OFF_KEEP_STATE");
12980 return NO_ERROR;
12981 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012982
12983 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12984 camera_metadata_ro_entry entry =
12985 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12986 if (0 == entry.count)
12987 return rc;
12988
12989 uint8_t fwk_sceneMode = entry.data.u8[0];
12990
12991 int val = lookupHalName(SCENE_MODES_MAP,
12992 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12993 fwk_sceneMode);
12994 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012995 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012996 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012997 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012998 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012999
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013000 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13001 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13002 }
13003
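    // When sensor HDR is not active, fall back to multi-frame HDR bracketing for the HDR scene mode and program the scene (bestshot) mode.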
13004 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13005 if (sceneMode == CAM_SCENE_MODE_HDR) { // sceneMode holds the HAL enum mapped via lookupHalName above
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013006 cam_hdr_param_t hdr_params;
13007 hdr_params.hdr_enable = 1;
13008 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13009 hdr_params.hdr_need_1x = false;
13010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13011 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13012 rc = BAD_VALUE;
13013 }
13014 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013015
Thierry Strudel3d639192016-09-09 11:52:26 -070013016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13017 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13018 rc = BAD_VALUE;
13019 }
13020 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013021
13022 if (mForceHdrSnapshot) {
13023 cam_hdr_param_t hdr_params;
13024 hdr_params.hdr_enable = 1;
13025 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13026 hdr_params.hdr_need_1x = false;
13027 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13028 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13029 rc = BAD_VALUE;
13030 }
13031 }
13032
Thierry Strudel3d639192016-09-09 11:52:26 -070013033 return rc;
13034}
13035
13036/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013037 * FUNCTION : setVideoHdrMode
13038 *
13039 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13040 *
13041 * PARAMETERS :
13042 * @hal_metadata: hal metadata structure
13043 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13044 *
13045 * RETURN : int32_t type of status
13046 *==========================================================================*/
13047int32_t QCamera3HardwareInterface::setVideoHdrMode(
13048 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13049{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013050 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13051 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13052 }
13053
13054 LOGE("Invalid Video HDR mode %d!", vhdr);
13055 return BAD_VALUE;
13056}
13057
13058/*===========================================================================
13059 * FUNCTION : setSensorHDR
13060 *
13061 * DESCRIPTION: Enable/disable sensor HDR.
13062 *
13063 * PARAMETERS :
13064 * @hal_metadata: hal metadata structure
13065 * @enable: boolean whether to enable/disable sensor HDR
13066 *
13067 * RETURN : int32_t type of status
13068 *==========================================================================*/
13069int32_t QCamera3HardwareInterface::setSensorHDR(
13070 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13071{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013072 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013073 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13074
13075 if (enable) {
13076 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13077 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13078 #ifdef _LE_CAMERA_
13079 //Default to staggered HDR for IOT
13080 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13081 #else
13082 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13083 #endif
13084 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13085 }
13086
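    // Each sensor HDR flavor is applied only if the corresponding capability bit is advertised in qcom_supported_feature_mask.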
13087 bool isSupported = false;
13088 switch (sensor_hdr) {
13089 case CAM_SENSOR_HDR_IN_SENSOR:
13090 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13091 CAM_QCOM_FEATURE_SENSOR_HDR) {
13092 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013093 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013094 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013095 break;
13096 case CAM_SENSOR_HDR_ZIGZAG:
13097 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13098 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13099 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013100 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013101 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013102 break;
13103 case CAM_SENSOR_HDR_STAGGERED:
13104 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13105 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13106 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013107 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013108 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013109 break;
13110 case CAM_SENSOR_HDR_OFF:
13111 isSupported = true;
13112 LOGD("Turning off sensor HDR");
13113 break;
13114 default:
13115 LOGE("HDR mode %d not supported", sensor_hdr);
13116 rc = BAD_VALUE;
13117 break;
13118 }
13119
13120 if(isSupported) {
13121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13122 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13123 rc = BAD_VALUE;
13124 } else {
13125 if(!isVideoHdrEnable)
13126 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013127 }
13128 }
13129 return rc;
13130}
13131
13132/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013133 * FUNCTION : needRotationReprocess
13134 *
13135 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13136 *
13137 * PARAMETERS : none
13138 *
13139 * RETURN : true: needed
13140 * false: no need
13141 *==========================================================================*/
13142bool QCamera3HardwareInterface::needRotationReprocess()
13143{
13144 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13145 // pp has the capability to process rotation, so do rotation via reprocess
13146 LOGH("need do reprocess for rotation");
13147 return true;
13148 }
13149
13150 return false;
13151}
13152
13153/*===========================================================================
13154 * FUNCTION : needReprocess
13155 *
13156 * DESCRIPTION: check whether reprocess is needed
13157 *
13158 * PARAMETERS : none
13159 *
13160 * RETURN : true: needed
13161 * false: no need
13162 *==========================================================================*/
13163bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13164{
13165 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13166 // TODO: add for ZSL HDR later
13167 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13168 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13169 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13170 return true;
13171 } else {
13172 LOGH("already post processed frame");
13173 return false;
13174 }
13175 }
13176 return needRotationReprocess();
13177}
13178
13179/*===========================================================================
13180 * FUNCTION : needJpegExifRotation
13181 *
13182 * DESCRIPTION: check whether JPEG EXIF rotation is needed (when pp cannot rotate)
13183 *
13184 * PARAMETERS : none
13185 *
13186 * RETURN : true: needed
13187 * false: no need
13188 *==========================================================================*/
13189bool QCamera3HardwareInterface::needJpegExifRotation()
13190{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013191 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013192 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13193 LOGD("Need use Jpeg EXIF Rotation");
13194 return true;
13195 }
13196 return false;
13197}
13198
13199/*===========================================================================
13200 * FUNCTION : addOfflineReprocChannel
13201 *
13202 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13203 * coming from input channel
13204 *
13205 * PARAMETERS :
13206 * @config : reprocess configuration
13207 * @inputChHandle : pointer to the input (source) channel
13208 *
13209 *
13210 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13211 *==========================================================================*/
13212QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13213 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13214{
13215 int32_t rc = NO_ERROR;
13216 QCamera3ReprocessChannel *pChannel = NULL;
13217
13218 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013219 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13220 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013221 if (NULL == pChannel) {
13222 LOGE("no mem for reprocess channel");
13223 return NULL;
13224 }
13225
13226 rc = pChannel->initialize(IS_TYPE_NONE);
13227 if (rc != NO_ERROR) {
13228 LOGE("init reprocess channel failed, ret = %d", rc);
13229 delete pChannel;
13230 return NULL;
13231 }
13232
13233 // pp feature config
13234 cam_pp_feature_config_t pp_config;
13235 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13236
13237 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13238 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13239 & CAM_QCOM_FEATURE_DSDN) {
13240 //Use CPP CDS incase h/w supports it.
13241 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13242 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13243 }
13244 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13245 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13246 }
13247
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013248 if (config.hdr_param.hdr_enable) {
13249 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13250 pp_config.hdr_param = config.hdr_param;
13251 }
13252
13253 if (mForceHdrSnapshot) {
13254 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13255 pp_config.hdr_param.hdr_enable = 1;
13256 pp_config.hdr_param.hdr_need_1x = 0;
13257 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13258 }
13259
Thierry Strudel3d639192016-09-09 11:52:26 -070013260 rc = pChannel->addReprocStreamsFromSource(pp_config,
13261 config,
13262 IS_TYPE_NONE,
13263 mMetadataChannel);
13264
13265 if (rc != NO_ERROR) {
13266 delete pChannel;
13267 return NULL;
13268 }
13269 return pChannel;
13270}
13271
13272/*===========================================================================
13273 * FUNCTION : getMobicatMask
13274 *
13275 * DESCRIPTION: returns mobicat mask
13276 *
13277 * PARAMETERS : none
13278 *
13279 * RETURN : mobicat mask
13280 *
13281 *==========================================================================*/
13282uint8_t QCamera3HardwareInterface::getMobicatMask()
13283{
13284 return m_MobicatMask;
13285}
13286
13287/*===========================================================================
13288 * FUNCTION : setMobicat
13289 *
13290 * DESCRIPTION: set Mobicat on/off.
13291 *
13292 * PARAMETERS :
13293 * @params : none
13294 *
13295 * RETURN : int32_t type of status
13296 * NO_ERROR -- success
13297 * none-zero failure code
13298 *==========================================================================*/
13299int32_t QCamera3HardwareInterface::setMobicat()
13300{
13301 char value [PROPERTY_VALUE_MAX];
13302 property_get("persist.camera.mobicat", value, "0");
13303 int32_t ret = NO_ERROR;
13304 uint8_t enableMobi = (uint8_t)atoi(value);
13305
13306 if (enableMobi) {
13307 tune_cmd_t tune_cmd;
13308 tune_cmd.type = SET_RELOAD_CHROMATIX;
13309 tune_cmd.module = MODULE_ALL;
13310 tune_cmd.value = TRUE;
13311 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13312 CAM_INTF_PARM_SET_VFE_COMMAND,
13313 tune_cmd);
13314
13315 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13316 CAM_INTF_PARM_SET_PP_COMMAND,
13317 tune_cmd);
13318 }
13319 m_MobicatMask = enableMobi;
13320
13321 return ret;
13322}
13323
13324/*===========================================================================
13325* FUNCTION : getLogLevel
13326*
13327* DESCRIPTION: Reads the log level property into a variable
13328*
13329* PARAMETERS :
13330* None
13331*
13332* RETURN :
13333* None
13334*==========================================================================*/
13335void QCamera3HardwareInterface::getLogLevel()
13336{
13337 char prop[PROPERTY_VALUE_MAX];
13338 uint32_t globalLogLevel = 0;
13339
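    // Log verbosity is controlled through system properties, e.g. (illustrative):
    //   adb shell setprop persist.camera.hal.debug 3
    // The effective level is the higher of the HAL-specific and global settings.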
13340 property_get("persist.camera.hal.debug", prop, "0");
13341 int val = atoi(prop);
13342 if (0 <= val) {
13343 gCamHal3LogLevel = (uint32_t)val;
13344 }
13345
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013346 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013347 gKpiDebugLevel = atoi(prop);
13348
13349 property_get("persist.camera.global.debug", prop, "0");
13350 val = atoi(prop);
13351 if (0 <= val) {
13352 globalLogLevel = (uint32_t)val;
13353 }
13354
13355    /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
13356 if (gCamHal3LogLevel < globalLogLevel)
13357 gCamHal3LogLevel = globalLogLevel;
13358
13359 return;
13360}
13361
13362/*===========================================================================
13363 * FUNCTION : validateStreamRotations
13364 *
13365 * DESCRIPTION: Check if the rotations requested are supported
13366 *
13367 * PARAMETERS :
13368 * @stream_list : streams to be configured
13369 *
13370 * RETURN : NO_ERROR on success
13371 * -EINVAL on failure
13372 *
13373 *==========================================================================*/
13374int QCamera3HardwareInterface::validateStreamRotations(
13375 camera3_stream_configuration_t *streamList)
13376{
13377 int rc = NO_ERROR;
13378
13379 /*
13380 * Loop through all streams requested in configuration
13381 * Check if unsupported rotations have been requested on any of them
13382 */
13383 for (size_t j = 0; j < streamList->num_streams; j++){
13384 camera3_stream_t *newStream = streamList->streams[j];
13385
13386 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13387 bool isImplDef = (newStream->format ==
13388 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13389 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13390 isImplDef);
13391
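        // Rotation is only supported on implementation-defined, non-ZSL output streams;
        // all other streams must request CAMERA3_STREAM_ROTATION_0.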
13392 if (isRotated && (!isImplDef || isZsl)) {
13393 LOGE("Error: Unsupported rotation of %d requested for stream"
13394                        " type:%d and stream format:%d",
13395 newStream->rotation, newStream->stream_type,
13396 newStream->format);
13397 rc = -EINVAL;
13398 break;
13399 }
13400 }
13401
13402 return rc;
13403}
13404
13405/*===========================================================================
13406* FUNCTION : getFlashInfo
13407*
13408* DESCRIPTION: Retrieve information about whether the device has a flash.
13409*
13410* PARAMETERS :
13411* @cameraId : Camera id to query
13412* @hasFlash : Boolean indicating whether there is a flash device
13413* associated with given camera
13414* @flashNode : If a flash device exists, this will be its device node.
13415*
13416* RETURN :
13417* None
13418*==========================================================================*/
13419void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13420 bool& hasFlash,
13421 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13422{
13423 cam_capability_t* camCapability = gCamCapability[cameraId];
13424 if (NULL == camCapability) {
13425 hasFlash = false;
13426 flashNode[0] = '\0';
13427 } else {
13428 hasFlash = camCapability->flash_available;
13429 strlcpy(flashNode,
13430 (char*)camCapability->flash_dev_name,
13431 QCAMERA_MAX_FILEPATH_LENGTH);
13432 }
13433}
13434
13435/*===========================================================================
13436* FUNCTION : getEepromVersionInfo
13437*
13438* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13439*
13440* PARAMETERS : None
13441*
13442* RETURN : string describing EEPROM version
13443* "\0" if no such info available
13444*==========================================================================*/
13445const char *QCamera3HardwareInterface::getEepromVersionInfo()
13446{
13447 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13448}
13449
13450/*===========================================================================
13451* FUNCTION : getLdafCalib
13452*
13453* DESCRIPTION: Retrieve Laser AF calibration data
13454*
13455* PARAMETERS : None
13456*
13457* RETURN : Two uint32_t describing laser AF calibration data
13458* NULL if none is available.
13459*==========================================================================*/
13460const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13461{
13462 if (mLdafCalibExist) {
13463 return &mLdafCalib[0];
13464 } else {
13465 return NULL;
13466 }
13467}
13468
13469/*===========================================================================
13470 * FUNCTION : dynamicUpdateMetaStreamInfo
13471 *
13472 * DESCRIPTION: This function:
13473 * (1) stops all the channels
13474 * (2) returns error on pending requests and buffers
13475 * (3) sends metastream_info in setparams
13476 * (4) starts all channels
13477 * This is useful when sensor has to be restarted to apply any
13478 * settings such as frame rate from a different sensor mode
13479 *
13480 * PARAMETERS : None
13481 *
13482 * RETURN : NO_ERROR on success
13483 * Error codes on failure
13484 *
13485 *==========================================================================*/
13486int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13487{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013488 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013489 int rc = NO_ERROR;
13490
13491 LOGD("E");
13492
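    // Mirror a stream reconfiguration: stop all channels, fail any outstanding
    // requests, re-send the (unchanged) stream info so the backend can apply the
    // new sensor mode, then restart the channels.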
13493 rc = stopAllChannels();
13494 if (rc < 0) {
13495 LOGE("stopAllChannels failed");
13496 return rc;
13497 }
13498
13499 rc = notifyErrorForPendingRequests();
13500 if (rc < 0) {
13501 LOGE("notifyErrorForPendingRequests failed");
13502 return rc;
13503 }
13504
13505 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13506 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13507                ", Format:%d",
13508 mStreamConfigInfo.type[i],
13509 mStreamConfigInfo.stream_sizes[i].width,
13510 mStreamConfigInfo.stream_sizes[i].height,
13511 mStreamConfigInfo.postprocess_mask[i],
13512 mStreamConfigInfo.format[i]);
13513 }
13514
13515 /* Send meta stream info once again so that ISP can start */
13516 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13517 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13518 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13519 mParameters);
13520 if (rc < 0) {
13521        LOGE("Setting meta stream info failed; sensor mode will not change");
13522 }
13523
13524 rc = startAllChannels();
13525 if (rc < 0) {
13526 LOGE("startAllChannels failed");
13527 return rc;
13528 }
13529
13530 LOGD("X");
13531 return rc;
13532}
13533
13534/*===========================================================================
13535 * FUNCTION : stopAllChannels
13536 *
13537 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13538 *
13539 * PARAMETERS : None
13540 *
13541 * RETURN : NO_ERROR on success
13542 * Error codes on failure
13543 *
13544 *==========================================================================*/
13545int32_t QCamera3HardwareInterface::stopAllChannels()
13546{
13547 int32_t rc = NO_ERROR;
13548
13549 LOGD("Stopping all channels");
13550 // Stop the Streams/Channels
13551 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13552 it != mStreamInfo.end(); it++) {
13553 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13554 if (channel) {
13555 channel->stop();
13556 }
13557 (*it)->status = INVALID;
13558 }
13559
13560 if (mSupportChannel) {
13561 mSupportChannel->stop();
13562 }
13563 if (mAnalysisChannel) {
13564 mAnalysisChannel->stop();
13565 }
13566 if (mRawDumpChannel) {
13567 mRawDumpChannel->stop();
13568 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013569 if (mHdrPlusRawSrcChannel) {
13570 mHdrPlusRawSrcChannel->stop();
13571 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013572 if (mMetadataChannel) {
13573        /* If mStreamInfo is not empty, there is a metadata stream */
13574 mMetadataChannel->stop();
13575 }
13576
13577 LOGD("All channels stopped");
13578 return rc;
13579}
13580
13581/*===========================================================================
13582 * FUNCTION : startAllChannels
13583 *
13584 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13585 *
13586 * PARAMETERS : None
13587 *
13588 * RETURN : NO_ERROR on success
13589 * Error codes on failure
13590 *
13591 *==========================================================================*/
13592int32_t QCamera3HardwareInterface::startAllChannels()
13593{
13594 int32_t rc = NO_ERROR;
13595
13596 LOGD("Start all channels ");
13597 // Start the Streams/Channels
13598 if (mMetadataChannel) {
13599        /* If mStreamInfo is not empty, there is a metadata stream */
13600 rc = mMetadataChannel->start();
13601 if (rc < 0) {
13602 LOGE("META channel start failed");
13603 return rc;
13604 }
13605 }
13606 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13607 it != mStreamInfo.end(); it++) {
13608 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13609 if (channel) {
13610 rc = channel->start();
13611 if (rc < 0) {
13612 LOGE("channel start failed");
13613 return rc;
13614 }
13615 }
13616 }
13617 if (mAnalysisChannel) {
13618 mAnalysisChannel->start();
13619 }
13620 if (mSupportChannel) {
13621 rc = mSupportChannel->start();
13622 if (rc < 0) {
13623 LOGE("Support channel start failed");
13624 return rc;
13625 }
13626 }
13627 if (mRawDumpChannel) {
13628 rc = mRawDumpChannel->start();
13629 if (rc < 0) {
13630 LOGE("RAW dump channel start failed");
13631 return rc;
13632 }
13633 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013634 if (mHdrPlusRawSrcChannel) {
13635 rc = mHdrPlusRawSrcChannel->start();
13636 if (rc < 0) {
13637 LOGE("HDR+ RAW channel start failed");
13638 return rc;
13639 }
13640 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013641
13642 LOGD("All channels started");
13643 return rc;
13644}
13645
13646/*===========================================================================
13647 * FUNCTION : notifyErrorForPendingRequests
13648 *
13649 * DESCRIPTION: This function sends error for all the pending requests/buffers
13650 *
13651 * PARAMETERS : None
13652 *
13653 * RETURN : Error codes
13654 * NO_ERROR on success
13655 *
13656 *==========================================================================*/
13657int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13658{
13659 int32_t rc = NO_ERROR;
13660 unsigned int frameNum = 0;
13661 camera3_capture_result_t result;
13662 camera3_stream_buffer_t *pStream_Buf = NULL;
13663
13664 memset(&result, 0, sizeof(camera3_capture_result_t));
13665
13666 if (mPendingRequestsList.size() > 0) {
13667 pendingRequestIterator i = mPendingRequestsList.begin();
13668 frameNum = i->frame_number;
13669 } else {
13670 /* There might still be pending buffers even though there are
13671 no pending requests. Setting the frameNum to MAX so that
13672 all the buffers with smaller frame numbers are returned */
13673 frameNum = UINT_MAX;
13674 }
13675
13676 LOGH("Oldest frame num on mPendingRequestsList = %u",
13677 frameNum);
13678
Emilian Peev7650c122017-01-19 08:24:33 -080013679 notifyErrorFoPendingDepthData(mDepthChannel);
13680
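    // Requests older than the oldest entry in mPendingRequestsList have already had
    // their result metadata delivered, so they only need ERROR_BUFFER notifications;
    // newer requests are failed with a full ERROR_REQUEST below.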
Thierry Strudel3d639192016-09-09 11:52:26 -070013681 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13682 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13683
13684 if (req->frame_number < frameNum) {
13685            // Send an ERROR_BUFFER notification to the framework for each buffer of
13686            // this request, since its result metadata has already been sent
13687 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13688 req->frame_number, req->mPendingBufferList.size());
13689
13690 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13691 if (NULL == pStream_Buf) {
13692 LOGE("No memory for pending buffers array");
13693 return NO_MEMORY;
13694 }
13695 memset(pStream_Buf, 0,
13696 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13697 result.result = NULL;
13698 result.frame_number = req->frame_number;
13699 result.num_output_buffers = req->mPendingBufferList.size();
13700 result.output_buffers = pStream_Buf;
13701
13702 size_t index = 0;
13703 for (auto info = req->mPendingBufferList.begin();
13704 info != req->mPendingBufferList.end(); ) {
13705
13706 camera3_notify_msg_t notify_msg;
13707 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13708 notify_msg.type = CAMERA3_MSG_ERROR;
13709 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13710 notify_msg.message.error.error_stream = info->stream;
13711 notify_msg.message.error.frame_number = req->frame_number;
13712 pStream_Buf[index].acquire_fence = -1;
13713 pStream_Buf[index].release_fence = -1;
13714 pStream_Buf[index].buffer = info->buffer;
13715 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13716 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013717 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013718 index++;
13719 // Remove buffer from list
13720 info = req->mPendingBufferList.erase(info);
13721 }
13722
13723 // Remove this request from Map
13724 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13725 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13726 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13727
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013728 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013729
13730 delete [] pStream_Buf;
13731 } else {
13732
13733 // Go through the pending requests info and send error request to framework
13734 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13735
13736 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13737
13738 // Send error notify to frameworks
13739 camera3_notify_msg_t notify_msg;
13740 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13741 notify_msg.type = CAMERA3_MSG_ERROR;
13742 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13743 notify_msg.message.error.error_stream = NULL;
13744 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013745 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013746
13747 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13748 if (NULL == pStream_Buf) {
13749 LOGE("No memory for pending buffers array");
13750 return NO_MEMORY;
13751 }
13752 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13753
13754 result.result = NULL;
13755 result.frame_number = req->frame_number;
13756 result.input_buffer = i->input_buffer;
13757 result.num_output_buffers = req->mPendingBufferList.size();
13758 result.output_buffers = pStream_Buf;
13759
13760 size_t index = 0;
13761 for (auto info = req->mPendingBufferList.begin();
13762 info != req->mPendingBufferList.end(); ) {
13763 pStream_Buf[index].acquire_fence = -1;
13764 pStream_Buf[index].release_fence = -1;
13765 pStream_Buf[index].buffer = info->buffer;
13766 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13767 pStream_Buf[index].stream = info->stream;
13768 index++;
13769 // Remove buffer from list
13770 info = req->mPendingBufferList.erase(info);
13771 }
13772
13773 // Remove this request from Map
13774 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13775 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13776 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13777
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013778 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013779 delete [] pStream_Buf;
13780 i = erasePendingRequest(i);
13781 }
13782 }
13783
13784 /* Reset pending frame Drop list and requests list */
13785 mPendingFrameDropList.clear();
13786
13787 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13788 req.mPendingBufferList.clear();
13789 }
13790 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013791 LOGH("Cleared all the pending buffers ");
13792
13793 return rc;
13794}
13795
13796bool QCamera3HardwareInterface::isOnEncoder(
13797 const cam_dimension_t max_viewfinder_size,
13798 uint32_t width, uint32_t height)
13799{
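    // A stream is routed through the encoder path when it exceeds the maximum
    // viewfinder size or is larger than 4K in either dimension.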
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013800 return ((width > (uint32_t)max_viewfinder_size.width) ||
13801 (height > (uint32_t)max_viewfinder_size.height) ||
13802 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13803 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013804}
13805
13806/*===========================================================================
13807 * FUNCTION : setBundleInfo
13808 *
13809 * DESCRIPTION: Set bundle info for all streams that are bundled.
13810 *
13811 * PARAMETERS : None
13812 *
13813 * RETURN : NO_ERROR on success
13814 * Error codes on failure
13815 *==========================================================================*/
13816int32_t QCamera3HardwareInterface::setBundleInfo()
13817{
13818 int32_t rc = NO_ERROR;
13819
13820 if (mChannelHandle) {
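        // Query the bundle configuration once for this channel handle and propagate
        // it to every channel so they share the same bundle (sync) settings.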
13821 cam_bundle_config_t bundleInfo;
13822 memset(&bundleInfo, 0, sizeof(bundleInfo));
13823 rc = mCameraHandle->ops->get_bundle_info(
13824 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13825 if (rc != NO_ERROR) {
13826 LOGE("get_bundle_info failed");
13827 return rc;
13828 }
13829 if (mAnalysisChannel) {
13830 mAnalysisChannel->setBundleInfo(bundleInfo);
13831 }
13832 if (mSupportChannel) {
13833 mSupportChannel->setBundleInfo(bundleInfo);
13834 }
13835 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13836 it != mStreamInfo.end(); it++) {
13837 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13838 channel->setBundleInfo(bundleInfo);
13839 }
13840 if (mRawDumpChannel) {
13841 mRawDumpChannel->setBundleInfo(bundleInfo);
13842 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013843 if (mHdrPlusRawSrcChannel) {
13844 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13845 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013846 }
13847
13848 return rc;
13849}
13850
13851/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013852 * FUNCTION : setInstantAEC
13853 *
13854 * DESCRIPTION: Set Instant AEC related params.
13855 *
13856 * PARAMETERS :
13857 * @meta: CameraMetadata reference
13858 *
13859 * RETURN : NO_ERROR on success
13860 * Error codes on failure
13861 *==========================================================================*/
13862int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13863{
13864 int32_t rc = NO_ERROR;
13865 uint8_t val = 0;
13866 char prop[PROPERTY_VALUE_MAX];
13867
13868 // First try to configure instant AEC from framework metadata
13869 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13870 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13871 }
13872
13873    // If the framework did not set this value, try to read it from the system property.
13874 if (val == 0) {
13875 memset(prop, 0, sizeof(prop));
13876 property_get("persist.camera.instant.aec", prop, "0");
13877 val = (uint8_t)atoi(prop);
13878 }
13879
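    // Only values in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) are valid;
    // anything else is rejected with BAD_VALUE.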
13880 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13881 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13882 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13883 mInstantAEC = val;
13884 mInstantAECSettledFrameNumber = 0;
13885 mInstantAecFrameIdxCount = 0;
13886 LOGH("instantAEC value set %d",val);
13887 if (mInstantAEC) {
13888 memset(prop, 0, sizeof(prop));
13889 property_get("persist.camera.ae.instant.bound", prop, "10");
13890 int32_t aec_frame_skip_cnt = atoi(prop);
13891 if (aec_frame_skip_cnt >= 0) {
13892 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13893 } else {
13894 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13895 rc = BAD_VALUE;
13896 }
13897 }
13898 } else {
13899 LOGE("Bad instant aec value set %d", val);
13900 rc = BAD_VALUE;
13901 }
13902 return rc;
13903}
13904
13905/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013906 * FUNCTION : get_num_overall_buffers
13907 *
13908 * DESCRIPTION: Estimate number of pending buffers across all requests.
13909 *
13910 * PARAMETERS : None
13911 *
13912 * RETURN : Number of overall pending buffers
13913 *
13914 *==========================================================================*/
13915uint32_t PendingBuffersMap::get_num_overall_buffers()
13916{
13917 uint32_t sum_buffers = 0;
13918 for (auto &req : mPendingBuffersInRequest) {
13919 sum_buffers += req.mPendingBufferList.size();
13920 }
13921 return sum_buffers;
13922}
13923
13924/*===========================================================================
13925 * FUNCTION : removeBuf
13926 *
13927 * DESCRIPTION: Remove a matching buffer from tracker.
13928 *
13929 * PARAMETERS : @buffer: image buffer for the callback
13930 *
13931 * RETURN : None
13932 *
13933 *==========================================================================*/
13934void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13935{
13936 bool buffer_found = false;
13937 for (auto req = mPendingBuffersInRequest.begin();
13938 req != mPendingBuffersInRequest.end(); req++) {
13939 for (auto k = req->mPendingBufferList.begin();
13940 k != req->mPendingBufferList.end(); k++ ) {
13941 if (k->buffer == buffer) {
13942 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13943 req->frame_number, buffer);
13944 k = req->mPendingBufferList.erase(k);
13945 if (req->mPendingBufferList.empty()) {
13946 // Remove this request from Map
13947 req = mPendingBuffersInRequest.erase(req);
13948 }
13949 buffer_found = true;
13950 break;
13951 }
13952 }
13953 if (buffer_found) {
13954 break;
13955 }
13956 }
13957 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13958 get_num_overall_buffers());
13959}
13960
13961/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013962 * FUNCTION : getBufErrStatus
13963 *
13964 * DESCRIPTION: get buffer error status
13965 *
13966 * PARAMETERS : @buffer: buffer handle
13967 *
13968 * RETURN : Error status
13969 *
13970 *==========================================================================*/
13971int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13972{
13973 for (auto& req : mPendingBuffersInRequest) {
13974 for (auto& k : req.mPendingBufferList) {
13975 if (k.buffer == buffer)
13976 return k.bufStatus;
13977 }
13978 }
13979 return CAMERA3_BUFFER_STATUS_OK;
13980}
13981
13982/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013983 * FUNCTION : setPAAFSupport
13984 *
13985 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13986 * feature mask according to stream type and filter
13987 * arrangement
13988 *
13989 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13990 * @stream_type: stream type
13991 * @filter_arrangement: filter arrangement
13992 *
13993 * RETURN : None
13994 *==========================================================================*/
13995void QCamera3HardwareInterface::setPAAFSupport(
13996 cam_feature_mask_t& feature_mask,
13997 cam_stream_type_t stream_type,
13998 cam_color_filter_arrangement_t filter_arrangement)
13999{
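    // PAAF (preview-assisted auto focus) is enabled for Bayer sensors on preview,
    // analysis and video streams (unless the stream uses the QTI PPEISCORE feature),
    // and for mono (Y-only) sensors on the analysis stream.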
Thierry Strudel3d639192016-09-09 11:52:26 -070014000 switch (filter_arrangement) {
14001 case CAM_FILTER_ARRANGEMENT_RGGB:
14002 case CAM_FILTER_ARRANGEMENT_GRBG:
14003 case CAM_FILTER_ARRANGEMENT_GBRG:
14004 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014005 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14006 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014007 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014008 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14009 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014010 }
14011 break;
14012 case CAM_FILTER_ARRANGEMENT_Y:
14013 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14014 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14015 }
14016 break;
14017 default:
14018 break;
14019 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014020 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14021 feature_mask, stream_type, filter_arrangement);
14022
14023
Thierry Strudel3d639192016-09-09 11:52:26 -070014024}
14025
14026/*===========================================================================
14027* FUNCTION : getSensorMountAngle
14028*
14029* DESCRIPTION: Retrieve sensor mount angle
14030*
14031* PARAMETERS : None
14032*
14033* RETURN : sensor mount angle in uint32_t
14034*==========================================================================*/
14035uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14036{
14037 return gCamCapability[mCameraId]->sensor_mount_angle;
14038}
14039
14040/*===========================================================================
14041* FUNCTION : getRelatedCalibrationData
14042*
14043* DESCRIPTION: Retrieve related system calibration data
14044*
14045* PARAMETERS : None
14046*
14047* RETURN : Pointer of related system calibration data
14048*==========================================================================*/
14049const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14050{
14051 return (const cam_related_system_calibration_data_t *)
14052 &(gCamCapability[mCameraId]->related_cam_calibration);
14053}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014054
14055/*===========================================================================
14056 * FUNCTION : is60HzZone
14057 *
14058 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14059 *
14060 * PARAMETERS : None
14061 *
14062 * RETURN : True if in 60Hz zone, False otherwise
14063 *==========================================================================*/
14064bool QCamera3HardwareInterface::is60HzZone()
14065{
14066 time_t t = time(NULL);
14067 struct tm lt;
14068
14069 struct tm* r = localtime_r(&t, &lt);
14070
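    // Rough heuristic based on UTC offset: zones at UTC-2 or further west (the
    // Americas) and at UTC+8 or further east largely use 60Hz mains power, while
    // everything in between is treated as 50Hz. Defaults to 60Hz if local time
    // cannot be determined.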
14071 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14072 return true;
14073 else
14074 return false;
14075}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014076
14077/*===========================================================================
14078 * FUNCTION : adjustBlackLevelForCFA
14079 *
14080 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14081 * of bayer CFA (Color Filter Array).
14082 *
14083 * PARAMETERS : @input: black level pattern in the order of RGGB
14084 * @output: black level pattern in the order of CFA
14085 * @color_arrangement: CFA color arrangement
14086 *
14087 * RETURN : None
14088 *==========================================================================*/
14089template<typename T>
14090void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14091 T input[BLACK_LEVEL_PATTERN_CNT],
14092 T output[BLACK_LEVEL_PATTERN_CNT],
14093 cam_color_filter_arrangement_t color_arrangement)
14094{
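    // Input is always in R, Gr, Gb, B order; output follows the sensor's CFA readout
    // order. For example, for a GRBG sensor {R, Gr, Gb, B} becomes {Gr, R, B, Gb}.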
14095 switch (color_arrangement) {
14096 case CAM_FILTER_ARRANGEMENT_GRBG:
14097 output[0] = input[1];
14098 output[1] = input[0];
14099 output[2] = input[3];
14100 output[3] = input[2];
14101 break;
14102 case CAM_FILTER_ARRANGEMENT_GBRG:
14103 output[0] = input[2];
14104 output[1] = input[3];
14105 output[2] = input[0];
14106 output[3] = input[1];
14107 break;
14108 case CAM_FILTER_ARRANGEMENT_BGGR:
14109 output[0] = input[3];
14110 output[1] = input[2];
14111 output[2] = input[1];
14112 output[3] = input[0];
14113 break;
14114 case CAM_FILTER_ARRANGEMENT_RGGB:
14115 output[0] = input[0];
14116 output[1] = input[1];
14117 output[2] = input[2];
14118 output[3] = input[3];
14119 break;
14120 default:
14121 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14122 break;
14123 }
14124}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014125
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014126void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14127 CameraMetadata &resultMetadata,
14128 std::shared_ptr<metadata_buffer_t> settings)
14129{
14130 if (settings == nullptr) {
14131 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14132 return;
14133 }
14134
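    // Copy the JPEG-related and capture-intent settings from the original HDR+
    // request into the result metadata, since the ZSL result metadata does not
    // carry them.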
14135 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14136 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14137 }
14138
14139 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14140 String8 str((const char *)gps_methods);
14141 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14142 }
14143
14144 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14145 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14146 }
14147
14148 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14149 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14150 }
14151
14152 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14153 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14154 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14155 }
14156
14157 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14158 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14159 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14160 }
14161
14162 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14163 int32_t fwk_thumb_size[2];
14164 fwk_thumb_size[0] = thumb_size->width;
14165 fwk_thumb_size[1] = thumb_size->height;
14166 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14167 }
14168
14169 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14170 uint8_t fwk_intent = intent[0];
14171 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14172 }
14173}
14174
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014175bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
14176 const camera3_capture_request_t &request, const CameraMetadata &metadata)
14177{
14178 if (hdrPlusRequest == nullptr) return false;
14179
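    // A capture request is only submitted to HDR+ if it asks for high-quality noise
    // reduction and edge enhancement and produces exactly one JPEG (BLOB) output.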
14180 // Check noise reduction mode is high quality.
14181 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14182 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14183 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014184 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14185 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014186 return false;
14187 }
14188
14189 // Check edge mode is high quality.
14190 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14191 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14192 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14193 return false;
14194 }
14195
14196 if (request.num_output_buffers != 1 ||
14197 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14198 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014199 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14200 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14201                    request.output_buffers[i].stream->width,
14202                    request.output_buffers[i].stream->height,
14203                    request.output_buffers[i].stream->format);
14204 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014205 return false;
14206 }
14207
14208 // Get a YUV buffer from pic channel.
14209 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14210 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14211 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14212 if (res != OK) {
14213 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14214 __FUNCTION__, strerror(-res), res);
14215 return false;
14216 }
14217
14218 pbcamera::StreamBuffer buffer;
14219 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014220 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014221 buffer.data = yuvBuffer->buffer;
14222 buffer.dataSize = yuvBuffer->frame_len;
14223
14224 pbcamera::CaptureRequest pbRequest;
14225 pbRequest.id = request.frame_number;
14226 pbRequest.outputBuffers.push_back(buffer);
14227
14228 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014229 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014230 if (res != OK) {
14231 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14232 strerror(-res), res);
14233 return false;
14234 }
14235
14236 hdrPlusRequest->yuvBuffer = yuvBuffer;
14237 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14238
14239 return true;
14240}
14241
Chien-Yu Chenee335912017-02-09 17:53:20 -080014242status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14243{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014244 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014245 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
14246 return -ENODEV;
14247 }
14248
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014249 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014250
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014251 // Connect to HDR+ service if it's not connected yet.
14252 pthread_mutex_lock(&gCamLock);
14253 if (!gEaselConnected) {
14254 // Connect to HDR+ service
14255 res = gHdrPlusClient->connect(this);
14256 if (res != OK) {
14257 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14258 strerror(-res), res);
14259 pthread_mutex_unlock(&gCamLock);
14260 return res;
14261 }
14262
14263 // Set static metadata.
14264 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14265 if (res != OK) {
14266            LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14267 strerror(-res), res);
14268 gHdrPlusClient->disconnect();
14269 pthread_mutex_unlock(&gCamLock);
14270 return res;
14271 }
14272 gEaselConnected = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014273 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014274 pthread_mutex_unlock(&gCamLock);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014275
14276 // Configure stream for HDR+.
14277 res = configureHdrPlusStreamsLocked();
14278 if (res != OK) {
14279 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014280 return res;
14281 }
14282
14283 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14284 res = gHdrPlusClient->setZslHdrPlusMode(true);
14285 if (res != OK) {
14286 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014287 return res;
14288 }
14289
14290 mHdrPlusModeEnabled = true;
14291 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14292
14293 return OK;
14294}
14295
14296void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14297{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014298 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014299 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014300 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14301 if (res != OK) {
14302 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14303 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014304 }
14305
14306 mHdrPlusModeEnabled = false;
14307 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14308}
14309
14310status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014311{
14312 pbcamera::InputConfiguration inputConfig;
14313 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14314 status_t res = OK;
14315
14316 // Configure HDR+ client streams.
14317 // Get input config.
14318 if (mHdrPlusRawSrcChannel) {
14319 // HDR+ input buffers will be provided by HAL.
14320 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14321 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14322 if (res != OK) {
14323            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14324 __FUNCTION__, strerror(-res), res);
14325 return res;
14326 }
14327
14328 inputConfig.isSensorInput = false;
14329 } else {
14330 // Sensor MIPI will send data to Easel.
14331 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014332 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014333 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14334 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14335 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14336 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14337 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14338 if (mSensorModeInfo.num_raw_bits != 10) {
14339 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14340 mSensorModeInfo.num_raw_bits);
14341 return BAD_VALUE;
14342 }
14343
14344 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014345 }
14346
14347 // Get output configurations.
14348 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014349 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014350
14351 // Easel may need to output YUV output buffers if mPictureChannel was created.
14352 pbcamera::StreamConfiguration yuvOutputConfig;
14353 if (mPictureChannel != nullptr) {
14354 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14355 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14356 if (res != OK) {
14357            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14358 __FUNCTION__, strerror(-res), res);
14359
14360 return res;
14361 }
14362
14363 outputStreamConfigs.push_back(yuvOutputConfig);
14364 }
14365
14366 // TODO: consider other channels for YUV output buffers.
14367
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014368 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014369 if (res != OK) {
14370        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14371 strerror(-res), res);
14372 return res;
14373 }
14374
14375 return OK;
14376}
14377
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014378void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14379 const camera_metadata_t &resultMetadata) {
14380 if (result != nullptr) {
14381 if (result->outputBuffers.size() != 1) {
14382            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14383 result->outputBuffers.size());
14384 return;
14385 }
14386
14387 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14388 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14389 result->outputBuffers[0].streamId);
14390 return;
14391 }
14392
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014393 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014394 HdrPlusPendingRequest pendingRequest;
14395 {
14396 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14397 auto req = mHdrPlusPendingRequests.find(result->requestId);
14398 pendingRequest = req->second;
14399 }
14400
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014401 // Update the result metadata with the settings of the HDR+ still capture request because
14402 // the result metadata belongs to a ZSL buffer.
14403 CameraMetadata metadata;
14404 metadata = &resultMetadata;
14405 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14406 camera_metadata_t* updatedResultMetadata = metadata.release();
14407
14408 QCamera3PicChannel *picChannel =
14409 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14410
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014411 // Check if dumping HDR+ YUV output is enabled.
14412 char prop[PROPERTY_VALUE_MAX];
14413 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14414 bool dumpYuvOutput = atoi(prop);
14415
14416 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014417 // Dump yuv buffer to a ppm file.
14418 pbcamera::StreamConfiguration outputConfig;
14419 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14420 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14421 if (rc == OK) {
14422 char buf[FILENAME_MAX] = {};
14423 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14424 result->requestId, result->outputBuffers[0].streamId,
14425 outputConfig.image.width, outputConfig.image.height);
14426
14427 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14428 } else {
14429 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14430 __FUNCTION__, strerror(-rc), rc);
14431 }
14432 }
14433
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014434 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14435 auto halMetadata = std::make_shared<metadata_buffer_t>();
14436 clear_metadata_buffer(halMetadata.get());
14437
14438 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14439 // encoding.
14440 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14441 halStreamId, /*minFrameDuration*/0);
14442 if (res == OK) {
14443 // Return the buffer to pic channel for encoding.
14444 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14445 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14446 halMetadata);
14447 } else {
14448 // Return the buffer without encoding.
14449 // TODO: This should not happen but we may want to report an error buffer to camera
14450 // service.
14451 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14452 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14453 strerror(-res), res);
14454 }
14455
14456 // Send HDR+ metadata to framework.
14457 {
14458 pthread_mutex_lock(&mMutex);
14459
14460 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14461 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14462 pthread_mutex_unlock(&mMutex);
14463 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014464
14465 // Remove the HDR+ pending request.
14466 {
14467 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14468 auto req = mHdrPlusPendingRequests.find(result->requestId);
14469 mHdrPlusPendingRequests.erase(req);
14470 }
14471 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014472}
14473
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014474void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14475 // TODO: Handle HDR+ capture failures and send the failure to framework.
14476 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14477 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14478
14479 // Return the buffer to pic channel.
14480 QCamera3PicChannel *picChannel =
14481 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14482 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14483
14484 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014485}
14486
Thierry Strudel3d639192016-09-09 11:52:26 -070014487}; //end namespace qcamera