Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
Jason Leeb9e76432017-03-10 17:14:19 -080084#define MAX_EIS_WIDTH 3840
85#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070086
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if the configured FPS is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Threshold (in seconds) for detecting missing request buffers
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
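// Illustrative sketch (not the HAL's exact helper): METADATA_MAP_SIZE yields the entry
// count of a statically sized array and is typically used with the QCameraMap tables
// defined later in this file for framework-to-HAL enum translation, roughly:
//
//   for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++) {
//       if (EFFECT_MODES_MAP[i].fwk_name == fwk_value) {
//           hal_value = EFFECT_MODES_MAP[i].hal_name;
//           break;
//       }
//   }
//
// The fwk_name/hal_name field names are assumed from the QCameraMap template; treat this
// as a sketch of the lookup pattern rather than the shipped implementation.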
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmark indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
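// Illustrative sketch: these indices are offsets into the flat per-face landmark array
// reported through ANDROID_STATISTICS_FACE_LANDMARKS (6 int32 values per face). Assuming
// a flattened array named 'landmarks' (hypothetical name), face i's left-eye position
// would be read as:
//
//   int32_t leftEyeX = landmarks[i * TOTAL_LANDMARK_INDICES + LEFT_EYE_X];
//   int32_t leftEyeY = landmarks[i * TOTAL_LANDMARK_INDICES + LEFT_EYE_Y];
//
// The actual packing is done in the face-detection metadata translation later in this file.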
124
Thierry Strudel3d639192016-09-09 11:52:26 -0700125cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
126const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
127extern pthread_mutex_t gCamLock;
128volatile uint32_t gCamHal3LogLevel = 1;
129extern uint8_t gNumCameraSessions;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800130// HDR+ client instance. If null, Easel was not detected on this device.
131// Note that this doesn't support concurrent front and back camera b/35960155.
132std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
133// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
134bool gEaselBypassOnly;
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -0700135// If Easel is connected.
136bool gEaselConnected;
Thierry Strudel3d639192016-09-09 11:52:26 -0700137
138const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
139 {"On", CAM_CDS_MODE_ON},
140 {"Off", CAM_CDS_MODE_OFF},
141 {"Auto",CAM_CDS_MODE_AUTO}
142};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700143const QCamera3HardwareInterface::QCameraMap<
144 camera_metadata_enum_android_video_hdr_mode_t,
145 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
146 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
147 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
148};
149
Thierry Strudel54dc9782017-02-15 12:12:10 -0800150const QCamera3HardwareInterface::QCameraMap<
151 camera_metadata_enum_android_binning_correction_mode_t,
152 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
153 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
154 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
155};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700156
157const QCamera3HardwareInterface::QCameraMap<
158 camera_metadata_enum_android_ir_mode_t,
159 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
160 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
161 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
162 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
163};
Thierry Strudel3d639192016-09-09 11:52:26 -0700164
165const QCamera3HardwareInterface::QCameraMap<
166 camera_metadata_enum_android_control_effect_mode_t,
167 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
168 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
169 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
170 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
171 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
172 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
173 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
174 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
175 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
176 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
177};
178
179const QCamera3HardwareInterface::QCameraMap<
180 camera_metadata_enum_android_control_awb_mode_t,
181 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
182 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
183 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
184 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
185 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
186 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
187 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
188 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
189 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
190 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
191};
192
193const QCamera3HardwareInterface::QCameraMap<
194 camera_metadata_enum_android_control_scene_mode_t,
195 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
196 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
197 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
198 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
199 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
200 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
201 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
202 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
203 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
204 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
205 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
206 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
207 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
208 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
209 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
210 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800211 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
212 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700213};
214
215const QCamera3HardwareInterface::QCameraMap<
216 camera_metadata_enum_android_control_af_mode_t,
217 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
218 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
219 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
220 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
221 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
222 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
223 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
224 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
225};
226
227const QCamera3HardwareInterface::QCameraMap<
228 camera_metadata_enum_android_color_correction_aberration_mode_t,
229 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
230 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
231 CAM_COLOR_CORRECTION_ABERRATION_OFF },
232 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
233 CAM_COLOR_CORRECTION_ABERRATION_FAST },
234 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
235 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
236};
237
238const QCamera3HardwareInterface::QCameraMap<
239 camera_metadata_enum_android_control_ae_antibanding_mode_t,
240 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
241 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
242 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
243 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
244 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
245};
246
247const QCamera3HardwareInterface::QCameraMap<
248 camera_metadata_enum_android_control_ae_mode_t,
249 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
250 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
251 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
252 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
253 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
254 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
255};
256
257const QCamera3HardwareInterface::QCameraMap<
258 camera_metadata_enum_android_flash_mode_t,
259 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
260 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
261 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
262 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
263};
264
265const QCamera3HardwareInterface::QCameraMap<
266 camera_metadata_enum_android_statistics_face_detect_mode_t,
267 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
268 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
269 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
270 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
271};
272
273const QCamera3HardwareInterface::QCameraMap<
274 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
275 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
276 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
277 CAM_FOCUS_UNCALIBRATED },
278 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
279 CAM_FOCUS_APPROXIMATE },
280 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
281 CAM_FOCUS_CALIBRATED }
282};
283
284const QCamera3HardwareInterface::QCameraMap<
285 camera_metadata_enum_android_lens_state_t,
286 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
287 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
288 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
289};
290
291const int32_t available_thumbnail_sizes[] = {0, 0,
292 176, 144,
293 240, 144,
294 256, 144,
295 240, 160,
296 256, 154,
297 240, 240,
298 320, 240};
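// Note: the table above is a flattened list of (width, height) pairs advertised as
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES; the leading (0, 0) entry denotes "no thumbnail".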
299
300const QCamera3HardwareInterface::QCameraMap<
301 camera_metadata_enum_android_sensor_test_pattern_mode_t,
302 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
303 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
304 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
305 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
306 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
307 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
308 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
309};
310
311/* Not all Android enum values are listed, since there is no mapping for every option.
312 * Also, the order of this list matters: when mapping from HAL to Android, the lookup
313 * traverses from lower to higher index, so for HAL values that map to multiple Android
314 * values the first match found is selected (see the illustrative note after the table below).
315 */
316const QCamera3HardwareInterface::QCameraMap<
317 camera_metadata_enum_android_sensor_reference_illuminant1_t,
318 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
319 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
320 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
321 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
322 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
323 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
324 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
325 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
326 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
327 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
328 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
329 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
330 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
331 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
332 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
333 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
334 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
335};
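// Illustrative note: several HAL illuminants appear more than once in the table above
// (CAM_AWB_D50, for example, is listed for D50, DAYLIGHT and FINE_WEATHER), so a
// HAL-to-Android reverse lookup that scans from index 0 returns the first match;
// CAM_AWB_D50 maps back to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 here. A sketch:
//
//   for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//       if (REFERENCE_ILLUMINANT_MAP[i].hal_name == hal_value) {
//           fwk_value = REFERENCE_ILLUMINANT_MAP[i].fwk_name;
//           break;
//       }
//   }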
336
337const QCamera3HardwareInterface::QCameraMap<
338 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
339 { 60, CAM_HFR_MODE_60FPS},
340 { 90, CAM_HFR_MODE_90FPS},
341 { 120, CAM_HFR_MODE_120FPS},
342 { 150, CAM_HFR_MODE_150FPS},
343 { 180, CAM_HFR_MODE_180FPS},
344 { 210, CAM_HFR_MODE_210FPS},
345 { 240, CAM_HFR_MODE_240FPS},
346 { 480, CAM_HFR_MODE_480FPS},
347};
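// Illustrative note (derived from the constants above, not a verbatim copy of the batching
// code): HFR batch mode kicks in when the configured FPS is at least MIN_FPS_FOR_BATCH_MODE,
// and the video batch size is derived from the FPS relative to the 30fps preview rate,
// capped at MAX_HFR_BATCH_SIZE, roughly:
//
//   uint32_t batchSize = MIN(fps / PREVIEW_FPS_FOR_HFR, MAX_HFR_BATCH_SIZE); // 240fps -> 8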
348
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700349const QCamera3HardwareInterface::QCameraMap<
350 qcamera3_ext_instant_aec_mode_t,
351 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
352 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
353 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
354 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
355};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800356
357const QCamera3HardwareInterface::QCameraMap<
358 qcamera3_ext_exposure_meter_mode_t,
359 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
360 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
361 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
362 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
363 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
364 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
365 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
366 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
367};
368
369const QCamera3HardwareInterface::QCameraMap<
370 qcamera3_ext_iso_mode_t,
371 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
372 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
373 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
374 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
375 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
376 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
377 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
378 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
379 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
380};
381
Thierry Strudel3d639192016-09-09 11:52:26 -0700382camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
383 .initialize = QCamera3HardwareInterface::initialize,
384 .configure_streams = QCamera3HardwareInterface::configure_streams,
385 .register_stream_buffers = NULL,
386 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
387 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
388 .get_metadata_vendor_tag_ops = NULL,
389 .dump = QCamera3HardwareInterface::dump,
390 .flush = QCamera3HardwareInterface::flush,
391 .reserved = {0},
392};
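// Illustrative note: the camera framework drives these static entry points according to
// the camera3 device contract, roughly in this order:
//
//   open()                        -> QCamera3HardwareInterface::openCamera()
//   ops->initialize(callbacks)    -> register the framework result/notify callbacks
//   ops->configure_streams(...)   -> create channels for the requested streams
//   ops->process_capture_request  -> called repeatedly, once per capture request
//   ops->flush() / close()        -> drain in-flight requests and tear down
//
// register_stream_buffers and get_metadata_vendor_tag_ops are NULL because both are
// deprecated for camera device API v3.2 and later (vendor tags come from the module).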
393
394// initialise to some default value
395uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
396
397/*===========================================================================
398 * FUNCTION : QCamera3HardwareInterface
399 *
400 * DESCRIPTION: constructor of QCamera3HardwareInterface
401 *
402 * PARAMETERS :
403 * @cameraId : camera ID
404 *
405 * RETURN : none
406 *==========================================================================*/
407QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
408 const camera_module_callbacks_t *callbacks)
409 : mCameraId(cameraId),
410 mCameraHandle(NULL),
411 mCameraInitialized(false),
412 mCallbackOps(NULL),
413 mMetadataChannel(NULL),
414 mPictureChannel(NULL),
415 mRawChannel(NULL),
416 mSupportChannel(NULL),
417 mAnalysisChannel(NULL),
418 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700419 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700420 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800421 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800422 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700423 mChannelHandle(0),
424 mFirstConfiguration(true),
425 mFlush(false),
426 mFlushPerf(false),
427 mParamHeap(NULL),
428 mParameters(NULL),
429 mPrevParameters(NULL),
430 m_bIsVideo(false),
431 m_bIs4KVideo(false),
432 m_bEisSupportedSize(false),
433 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800434 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700435 m_MobicatMask(0),
436 mMinProcessedFrameDuration(0),
437 mMinJpegFrameDuration(0),
438 mMinRawFrameDuration(0),
439 mMetaFrameCount(0U),
440 mUpdateDebugLevel(false),
441 mCallbacks(callbacks),
442 mCaptureIntent(0),
443 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700444 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800445 /* DevCamDebug metadata internal control member */
446 mDevCamDebugMetaEnable(0),
447 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700448 mBatchSize(0),
449 mToBeQueuedVidBufs(0),
450 mHFRVideoFps(DEFAULT_VIDEO_FPS),
451 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800452 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800453 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700454 mFirstFrameNumberInBatch(0),
455 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800456 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700457 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
458 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000459 mPDSupported(false),
460 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700461 mInstantAEC(false),
462 mResetInstantAEC(false),
463 mInstantAECSettledFrameNumber(0),
464 mAecSkipDisplayFrameBound(0),
465 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800466 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700467 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700468 mLastCustIntentFrmNum(-1),
469 mState(CLOSED),
470 mIsDeviceLinked(false),
471 mIsMainCamera(true),
472 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700473 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800474 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800475 mHdrPlusModeEnabled(false),
476 mIsApInputUsedForHdrPlus(false),
477 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800478 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700479{
480 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700481 mCommon.init(gCamCapability[cameraId]);
482 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700483#ifndef USE_HAL_3_3
484 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
485#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700486 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700487#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700488 mCameraDevice.common.close = close_camera_device;
489 mCameraDevice.ops = &mCameraOps;
490 mCameraDevice.priv = this;
491 gCamCapability[cameraId]->version = CAM_HAL_V3;
492 // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
493 //TBD - Verify whether this hardcoding is needed; check by printing whether mctl fills this to 3
494 gCamCapability[cameraId]->min_num_pp_bufs = 3;
495
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800496 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700497
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800498 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700499 mPendingLiveRequest = 0;
500 mCurrentRequestId = -1;
501 pthread_mutex_init(&mMutex, NULL);
502
503 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
504 mDefaultMetadata[i] = NULL;
505
506 // Getting system props of different kinds
507 char prop[PROPERTY_VALUE_MAX];
508 memset(prop, 0, sizeof(prop));
509 property_get("persist.camera.raw.dump", prop, "0");
510 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800511 property_get("persist.camera.hal3.force.hdr", prop, "0");
512 mForceHdrSnapshot = atoi(prop);
513
Thierry Strudel3d639192016-09-09 11:52:26 -0700514 if (mEnableRawDump)
515 LOGD("Raw dump from Camera HAL enabled");
516
517 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
518 memset(mLdafCalib, 0, sizeof(mLdafCalib));
519
520 memset(prop, 0, sizeof(prop));
521 property_get("persist.camera.tnr.preview", prop, "0");
522 m_bTnrPreview = (uint8_t)atoi(prop);
523
524 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800525 property_get("persist.camera.swtnr.preview", prop, "1");
526 m_bSwTnrPreview = (uint8_t)atoi(prop);
527
528 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700529 property_get("persist.camera.tnr.video", prop, "0");
530 m_bTnrVideo = (uint8_t)atoi(prop);
531
532 memset(prop, 0, sizeof(prop));
533 property_get("persist.camera.avtimer.debug", prop, "0");
534 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800535 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700536
Thierry Strudel54dc9782017-02-15 12:12:10 -0800537 memset(prop, 0, sizeof(prop));
538 property_get("persist.camera.cacmode.disable", prop, "0");
539 m_cacModeDisabled = (uint8_t)atoi(prop);
540
Thierry Strudel3d639192016-09-09 11:52:26 -0700541 //Load and read GPU library.
542 lib_surface_utils = NULL;
543 LINK_get_surface_pixel_alignment = NULL;
544 mSurfaceStridePadding = CAM_PAD_TO_32;
545 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
546 if (lib_surface_utils) {
547 *(void **)&LINK_get_surface_pixel_alignment =
548 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
549 if (LINK_get_surface_pixel_alignment) {
550 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
551 }
552 dlclose(lib_surface_utils);
553 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700554
Emilian Peev0f3c3162017-03-15 12:57:46 +0000555 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
556 mPDSupported = (0 <= mPDIndex) ? true : false;
557
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700558 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700559}
560
561/*===========================================================================
562 * FUNCTION : ~QCamera3HardwareInterface
563 *
564 * DESCRIPTION: destructor of QCamera3HardwareInterface
565 *
566 * PARAMETERS : none
567 *
568 * RETURN : none
569 *==========================================================================*/
570QCamera3HardwareInterface::~QCamera3HardwareInterface()
571{
572 LOGD("E");
573
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800574 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700575
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800576 // Disable power hint and enable the perf lock for close camera
577 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
578 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
579
580 // unlink of dualcam during close camera
581 if (mIsDeviceLinked) {
582 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
583 &m_pDualCamCmdPtr->bundle_info;
584 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
585 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
586 pthread_mutex_lock(&gCamLock);
587
588 if (mIsMainCamera == 1) {
589 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
590 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
591 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
592 // related session id should be session id of linked session
593 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
594 } else {
595 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
596 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
597 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
598 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
599 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800600 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800601 pthread_mutex_unlock(&gCamLock);
602
603 rc = mCameraHandle->ops->set_dual_cam_cmd(
604 mCameraHandle->camera_handle);
605 if (rc < 0) {
606 LOGE("Dualcam: Unlink failed, but still proceed to close");
607 }
608 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700609
610 /* We need to stop all streams before deleting any stream */
611 if (mRawDumpChannel) {
612 mRawDumpChannel->stop();
613 }
614
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700615 if (mHdrPlusRawSrcChannel) {
616 mHdrPlusRawSrcChannel->stop();
617 }
618
Thierry Strudel3d639192016-09-09 11:52:26 -0700619 // NOTE: 'camera3_stream_t *' objects are already freed at
620 // this stage by the framework
621 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
622 it != mStreamInfo.end(); it++) {
623 QCamera3ProcessingChannel *channel = (*it)->channel;
624 if (channel) {
625 channel->stop();
626 }
627 }
628 if (mSupportChannel)
629 mSupportChannel->stop();
630
631 if (mAnalysisChannel) {
632 mAnalysisChannel->stop();
633 }
634 if (mMetadataChannel) {
635 mMetadataChannel->stop();
636 }
637 if (mChannelHandle) {
638 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
639 mChannelHandle);
640 LOGD("stopping channel %d", mChannelHandle);
641 }
642
643 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
644 it != mStreamInfo.end(); it++) {
645 QCamera3ProcessingChannel *channel = (*it)->channel;
646 if (channel)
647 delete channel;
648 free (*it);
649 }
650 if (mSupportChannel) {
651 delete mSupportChannel;
652 mSupportChannel = NULL;
653 }
654
655 if (mAnalysisChannel) {
656 delete mAnalysisChannel;
657 mAnalysisChannel = NULL;
658 }
659 if (mRawDumpChannel) {
660 delete mRawDumpChannel;
661 mRawDumpChannel = NULL;
662 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700663 if (mHdrPlusRawSrcChannel) {
664 delete mHdrPlusRawSrcChannel;
665 mHdrPlusRawSrcChannel = NULL;
666 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700667 if (mDummyBatchChannel) {
668 delete mDummyBatchChannel;
669 mDummyBatchChannel = NULL;
670 }
671
672 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800673 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700674
675 if (mMetadataChannel) {
676 delete mMetadataChannel;
677 mMetadataChannel = NULL;
678 }
679
680 /* Clean up all channels */
681 if (mCameraInitialized) {
682 if(!mFirstConfiguration){
683 //send the last unconfigure
684 cam_stream_size_info_t stream_config_info;
685 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
686 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
687 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800688 m_bIs4KVideo ? 0 :
689 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700690 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700691 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
692 stream_config_info);
693 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
694 if (rc < 0) {
695 LOGE("set_parms failed for unconfigure");
696 }
697 }
698 deinitParameters();
699 }
700
701 if (mChannelHandle) {
702 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
703 mChannelHandle);
704 LOGH("deleting channel %d", mChannelHandle);
705 mChannelHandle = 0;
706 }
707
708 if (mState != CLOSED)
709 closeCamera();
710
711 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
712 req.mPendingBufferList.clear();
713 }
714 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700715 for (pendingRequestIterator i = mPendingRequestsList.begin();
716 i != mPendingRequestsList.end();) {
717 i = erasePendingRequest(i);
718 }
719 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
720 if (mDefaultMetadata[i])
721 free_camera_metadata(mDefaultMetadata[i]);
722
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800723 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700724
725 pthread_cond_destroy(&mRequestCond);
726
727 pthread_cond_destroy(&mBuffersCond);
728
729 pthread_mutex_destroy(&mMutex);
730 LOGD("X");
731}
732
733/*===========================================================================
734 * FUNCTION : erasePendingRequest
735 *
736 * DESCRIPTION: function to erase a desired pending request after freeing any
737 * allocated memory
738 *
739 * PARAMETERS :
740 * @i : iterator pointing to pending request to be erased
741 *
742 * RETURN : iterator pointing to the next request
743 *==========================================================================*/
744QCamera3HardwareInterface::pendingRequestIterator
745 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
746{
747 if (i->input_buffer != NULL) {
748 free(i->input_buffer);
749 i->input_buffer = NULL;
750 }
751 if (i->settings != NULL)
752 free_camera_metadata((camera_metadata_t*)i->settings);
753 return mPendingRequestsList.erase(i);
754}
755
756/*===========================================================================
757 * FUNCTION : camEvtHandle
758 *
759 * DESCRIPTION: Function registered to mm-camera-interface to handle events
760 *
761 * PARAMETERS :
762 * @camera_handle : interface layer camera handle
763 * @evt : ptr to event
764 * @user_data : user data ptr
765 *
766 * RETURN : none
767 *==========================================================================*/
768void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
769 mm_camera_event_t *evt,
770 void *user_data)
771{
772 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
773 if (obj && evt) {
774 switch(evt->server_event_type) {
775 case CAM_EVENT_TYPE_DAEMON_DIED:
776 pthread_mutex_lock(&obj->mMutex);
777 obj->mState = ERROR;
778 pthread_mutex_unlock(&obj->mMutex);
779 LOGE("Fatal, camera daemon died");
780 break;
781
782 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
783 LOGD("HAL got request pull from Daemon");
784 pthread_mutex_lock(&obj->mMutex);
785 obj->mWokenUpByDaemon = true;
786 obj->unblockRequestIfNecessary();
787 pthread_mutex_unlock(&obj->mMutex);
788 break;
789
790 default:
791 LOGW("Warning: Unhandled event %d",
792 evt->server_event_type);
793 break;
794 }
795 } else {
796 LOGE("NULL user_data/evt");
797 }
798}
799
800/*===========================================================================
801 * FUNCTION : openCamera
802 *
803 * DESCRIPTION: open camera
804 *
805 * PARAMETERS :
806 * @hw_device : double ptr for camera device struct
807 *
808 * RETURN : int32_t type of status
809 * NO_ERROR -- success
810 * non-zero failure code
811 *==========================================================================*/
812int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
813{
814 int rc = 0;
815 if (mState != CLOSED) {
816 *hw_device = NULL;
817 return PERMISSION_DENIED;
818 }
819
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800820 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700821 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
822 mCameraId);
823
824 rc = openCamera();
825 if (rc == 0) {
826 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800827 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700828 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800829 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700830
Thierry Strudel3d639192016-09-09 11:52:26 -0700831 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
832 mCameraId, rc);
833
834 if (rc == NO_ERROR) {
835 mState = OPENED;
836 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800837
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -0700838 if (gHdrPlusClient != nullptr) {
839 mIsApInputUsedForHdrPlus =
840 property_get_bool("persist.camera.hdrplus.apinput", false);
841 ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
842 mIsApInputUsedForHdrPlus ? "AP" : "Easel");
843 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800844
Thierry Strudel3d639192016-09-09 11:52:26 -0700845 return rc;
846}
847
848/*===========================================================================
849 * FUNCTION : openCamera
850 *
851 * DESCRIPTION: open camera
852 *
853 * PARAMETERS : none
854 *
855 * RETURN : int32_t type of status
856 * NO_ERROR -- success
857 * non-zero failure code
858 *==========================================================================*/
859int QCamera3HardwareInterface::openCamera()
860{
861 int rc = 0;
862 char value[PROPERTY_VALUE_MAX];
863
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800864 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700865 if (mCameraHandle) {
866 LOGE("Failure: Camera already opened");
867 return ALREADY_EXISTS;
868 }
869
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800870 if (gHdrPlusClient != nullptr) {
871 rc = gHdrPlusClient->resumeEasel();
872 if (rc != 0) {
873 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
874 return rc;
875 }
876 }
877
Thierry Strudel3d639192016-09-09 11:52:26 -0700878 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
879 if (rc < 0) {
880 LOGE("Failed to reserve flash for camera id: %d",
881 mCameraId);
882 return UNKNOWN_ERROR;
883 }
884
885 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
886 if (rc) {
887 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
888 return rc;
889 }
890
891 if (!mCameraHandle) {
892 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
893 return -ENODEV;
894 }
895
896 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
897 camEvtHandle, (void *)this);
898
899 if (rc < 0) {
900 LOGE("Error, failed to register event callback");
901 /* Not closing camera here since it is already handled in destructor */
902 return FAILED_TRANSACTION;
903 }
904
905 mExifParams.debug_params =
906 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
907 if (mExifParams.debug_params) {
908 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
909 } else {
910 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
911 return NO_MEMORY;
912 }
913 mFirstConfiguration = true;
914
915 //Notify display HAL that a camera session is active.
916 //But avoid calling the same during bootup because camera service might open/close
917 //cameras at boot time during its initialization and display service will also internally
918 //wait for camera service to initialize first while calling this display API, resulting in a
919 //deadlock situation. Since boot time camera open/close calls are made only to fetch
920 //capabilities, no need of this display bw optimization.
921 //Use "service.bootanim.exit" property to know boot status.
922 property_get("service.bootanim.exit", value, "0");
923 if (atoi(value) == 1) {
924 pthread_mutex_lock(&gCamLock);
925 if (gNumCameraSessions++ == 0) {
926 setCameraLaunchStatus(true);
927 }
928 pthread_mutex_unlock(&gCamLock);
929 }
930
931 //fill the session id needed while linking dual cam
932 pthread_mutex_lock(&gCamLock);
933 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
934 &sessionId[mCameraId]);
935 pthread_mutex_unlock(&gCamLock);
936
937 if (rc < 0) {
938 LOGE("Error, failed to get session id");
939 return UNKNOWN_ERROR;
940 } else {
941 //Allocate related cam sync buffer
942 //this is needed for the payload that goes along with bundling cmd for related
943 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700944 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
945 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700946 if(rc != OK) {
947 rc = NO_MEMORY;
948 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
949 return NO_MEMORY;
950 }
951
952 //Map memory for related cam sync buffer
953 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700954 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
955 m_pDualCamCmdHeap->getFd(0),
956 sizeof(cam_dual_camera_cmd_info_t),
957 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700958 if(rc < 0) {
959 LOGE("Dualcam: failed to map Related cam sync buffer");
960 rc = FAILED_TRANSACTION;
961 return NO_MEMORY;
962 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700963 m_pDualCamCmdPtr =
964 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700965 }
966
967 LOGH("mCameraId=%d",mCameraId);
968
969 return NO_ERROR;
970}
971
972/*===========================================================================
973 * FUNCTION : closeCamera
974 *
975 * DESCRIPTION: close camera
976 *
977 * PARAMETERS : none
978 *
979 * RETURN : int32_t type of status
980 * NO_ERROR -- success
981 * non-zero failure code
982 *==========================================================================*/
983int QCamera3HardwareInterface::closeCamera()
984{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800985 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700986 int rc = NO_ERROR;
987 char value[PROPERTY_VALUE_MAX];
988
989 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
990 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700991
992 // unmap memory for related cam sync buffer
993 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800994 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700995 if (NULL != m_pDualCamCmdHeap) {
996 m_pDualCamCmdHeap->deallocate();
997 delete m_pDualCamCmdHeap;
998 m_pDualCamCmdHeap = NULL;
999 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001000 }
1001
Thierry Strudel3d639192016-09-09 11:52:26 -07001002 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1003 mCameraHandle = NULL;
1004
1005 //reset session id to some invalid id
1006 pthread_mutex_lock(&gCamLock);
1007 sessionId[mCameraId] = 0xDEADBEEF;
1008 pthread_mutex_unlock(&gCamLock);
1009
1010 //Notify display HAL that there is no active camera session
1011 //but avoid calling the same during bootup. Refer to openCamera
1012 //for more details.
1013 property_get("service.bootanim.exit", value, "0");
1014 if (atoi(value) == 1) {
1015 pthread_mutex_lock(&gCamLock);
1016 if (--gNumCameraSessions == 0) {
1017 setCameraLaunchStatus(false);
1018 }
1019 pthread_mutex_unlock(&gCamLock);
1020 }
1021
Thierry Strudel3d639192016-09-09 11:52:26 -07001022 if (mExifParams.debug_params) {
1023 free(mExifParams.debug_params);
1024 mExifParams.debug_params = NULL;
1025 }
1026 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1027 LOGW("Failed to release flash for camera id: %d",
1028 mCameraId);
1029 }
1030 mState = CLOSED;
1031 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1032 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001033
1034 if (gHdrPlusClient != nullptr) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001035 // Disable HDR+ mode.
1036 disableHdrPlusModeLocked();
1037 // Disconnect Easel if it's connected.
1038 pthread_mutex_lock(&gCamLock);
1039 if (gEaselConnected) {
1040 gHdrPlusClient->disconnect();
1041 gEaselConnected = false;
1042 }
1043 pthread_mutex_unlock(&gCamLock);
1044
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001045 rc = gHdrPlusClient->suspendEasel();
1046 if (rc != 0) {
1047 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1048 }
1049 }
1050
Thierry Strudel3d639192016-09-09 11:52:26 -07001051 return rc;
1052}
1053
1054/*===========================================================================
1055 * FUNCTION : initialize
1056 *
1057 * DESCRIPTION: Initialize framework callback functions
1058 *
1059 * PARAMETERS :
1060 * @callback_ops : callback function to frameworks
1061 *
1062 * RETURN :
1063 *
1064 *==========================================================================*/
1065int QCamera3HardwareInterface::initialize(
1066 const struct camera3_callback_ops *callback_ops)
1067{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001068 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001069 int rc;
1070
1071 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1072 pthread_mutex_lock(&mMutex);
1073
1074 // Validate current state
1075 switch (mState) {
1076 case OPENED:
1077 /* valid state */
1078 break;
1079 default:
1080 LOGE("Invalid state %d", mState);
1081 rc = -ENODEV;
1082 goto err1;
1083 }
1084
1085 rc = initParameters();
1086 if (rc < 0) {
1087 LOGE("initParameters failed %d", rc);
1088 goto err1;
1089 }
1090 mCallbackOps = callback_ops;
1091
1092 mChannelHandle = mCameraHandle->ops->add_channel(
1093 mCameraHandle->camera_handle, NULL, NULL, this);
1094 if (mChannelHandle == 0) {
1095 LOGE("add_channel failed");
1096 rc = -ENOMEM;
1097 pthread_mutex_unlock(&mMutex);
1098 return rc;
1099 }
1100
1101 pthread_mutex_unlock(&mMutex);
1102 mCameraInitialized = true;
1103 mState = INITIALIZED;
1104 LOGI("X");
1105 return 0;
1106
1107err1:
1108 pthread_mutex_unlock(&mMutex);
1109 return rc;
1110}
1111
1112/*===========================================================================
1113 * FUNCTION : validateStreamDimensions
1114 *
1115 * DESCRIPTION: Check whether the requested stream configurations are among those advertised
1116 *
1117 * PARAMETERS :
1118 * @stream_list : streams to be configured
1119 *
1120 * RETURN :
1121 *
1122 *==========================================================================*/
1123int QCamera3HardwareInterface::validateStreamDimensions(
1124 camera3_stream_configuration_t *streamList)
1125{
1126 int rc = NO_ERROR;
1127 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001128 uint32_t depthWidth = 0;
1129 uint32_t depthHeight = 0;
1130 if (mPDSupported) {
1131 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1132 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1133 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001134
1135 camera3_stream_t *inputStream = NULL;
1136 /*
1137 * Loop through all streams to find the input stream, if one exists
1138 */
1139 for (size_t i = 0; i< streamList->num_streams; i++) {
1140 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1141 if (inputStream != NULL) {
1142 LOGE("Error, Multiple input streams requested");
1143 return -EINVAL;
1144 }
1145 inputStream = streamList->streams[i];
1146 }
1147 }
1148 /*
1149 * Loop through all streams requested in configuration
1150 * Check if unsupported sizes have been requested on any of them
1151 */
1152 for (size_t j = 0; j < streamList->num_streams; j++) {
1153 bool sizeFound = false;
1154 camera3_stream_t *newStream = streamList->streams[j];
1155
1156 uint32_t rotatedHeight = newStream->height;
1157 uint32_t rotatedWidth = newStream->width;
1158 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1159 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1160 rotatedHeight = newStream->width;
1161 rotatedWidth = newStream->height;
1162 }
1163
1164 /*
1165 * Supported sizes differ for each stream format; check against the
1166 * appropriate table.
1167 */
1168 switch (newStream->format) {
1169 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1170 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1171 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001172 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1173 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1174 mPDSupported) {
1175 if ((depthWidth == newStream->width) &&
1176 (depthHeight == newStream->height)) {
1177 sizeFound = true;
1178 }
1179 break;
1180 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001181 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1182 for (size_t i = 0; i < count; i++) {
1183 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1184 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1185 sizeFound = true;
1186 break;
1187 }
1188 }
1189 break;
1190 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001191 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1192 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001193 //As per spec, the depth point cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001194 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
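                // Worked example (illustrative dimensions): a 640x480 PD stat map gives
                // (640 * 480 * 2) / 16 = 38400 samples, so the matching depth BLOB stream
                // must be configured as 38400 x 1.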
Emilian Peev7650c122017-01-19 08:24:33 -08001195 if ((depthSamplesCount == newStream->width) &&
1196 (1 == newStream->height)) {
1197 sizeFound = true;
1198 }
1199 break;
1200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001201 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1202 /* Verify set size against generated sizes table */
1203 for (size_t i = 0; i < count; i++) {
1204 if (((int32_t)rotatedWidth ==
1205 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1206 ((int32_t)rotatedHeight ==
1207 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1208 sizeFound = true;
1209 break;
1210 }
1211 }
1212 break;
1213 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1214 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1215 default:
1216 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1217 || newStream->stream_type == CAMERA3_STREAM_INPUT
1218 || IS_USAGE_ZSL(newStream->usage)) {
1219 if (((int32_t)rotatedWidth ==
1220 gCamCapability[mCameraId]->active_array_size.width) &&
1221 ((int32_t)rotatedHeight ==
1222 gCamCapability[mCameraId]->active_array_size.height)) {
1223 sizeFound = true;
1224 break;
1225 }
1226 /* We could potentially break here to enforce that a ZSL stream
1227 * set by the framework is always the full active array size,
1228 * but it is not clear from the spec whether the framework will
1229 * always follow that. We also have logic to override to the full
1230 * array size, so keep the logic lenient for now.
1231 */
1232 }
1233 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1234 MAX_SIZES_CNT);
1235 for (size_t i = 0; i < count; i++) {
1236 if (((int32_t)rotatedWidth ==
1237 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1238 ((int32_t)rotatedHeight ==
1239 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1240 sizeFound = true;
1241 break;
1242 }
1243 }
1244 break;
1245 } /* End of switch(newStream->format) */
1246
1247 /* We error out if even a single stream has an unsupported size set */
1248 if (!sizeFound) {
1249 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1250 rotatedWidth, rotatedHeight, newStream->format,
1251 gCamCapability[mCameraId]->active_array_size.width,
1252 gCamCapability[mCameraId]->active_array_size.height);
1253 rc = -EINVAL;
1254 break;
1255 }
1256 } /* End of for each stream */
1257 return rc;
1258}
1259
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001260/*===========================================================================
1261 * FUNCTION : validateUsageFlags
1262 *
1263 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1264 *
1265 * PARAMETERS :
1266 * @stream_list : streams to be configured
1267 *
1268 * RETURN :
1269 * NO_ERROR if the usage flags are supported
1270 * error code if usage flags are not supported
1271 *
1272 *==========================================================================*/
1273int QCamera3HardwareInterface::validateUsageFlags(
1274 const camera3_stream_configuration_t* streamList)
1275{
1276 for (size_t j = 0; j < streamList->num_streams; j++) {
1277 const camera3_stream_t *newStream = streamList->streams[j];
1278
1279 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1280 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1281 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1282 continue;
1283 }
1284
1285 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1286 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1287 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1288 bool forcePreviewUBWC = true;
1289 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1290 forcePreviewUBWC = false;
1291 }
1292 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1293 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1294 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1295 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1296 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1297 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1298
1299 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1300 // So color spaces will always match.
1301
1302 // Check whether underlying formats of shared streams match.
1303 if (isVideo && isPreview && videoFormat != previewFormat) {
1304 LOGE("Combined video and preview usage flag is not supported");
1305 return -EINVAL;
1306 }
1307 if (isPreview && isZSL && previewFormat != zslFormat) {
1308 LOGE("Combined preview and zsl usage flag is not supported");
1309 return -EINVAL;
1310 }
1311 if (isVideo && isZSL && videoFormat != zslFormat) {
1312 LOGE("Combined video and zsl usage flag is not supported");
1313 return -EINVAL;
1314 }
1315 }
1316 return NO_ERROR;
1317}
1318
1319/*===========================================================================
1320 * FUNCTION : validateUsageFlagsForEis
1321 *
1322 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1323 *
1324 * PARAMETERS :
1325 * @stream_list : streams to be configured
1326 *
1327 * RETURN :
1328 * NO_ERROR if the usage flags are supported
1329 * error code if usage flags are not supported
1330 *
1331 *==========================================================================*/
1332int QCamera3HardwareInterface::validateUsageFlagsForEis(
1333 const camera3_stream_configuration_t* streamList)
1334{
1335 for (size_t j = 0; j < streamList->num_streams; j++) {
1336 const camera3_stream_t *newStream = streamList->streams[j];
1337
1338 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1339 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1340
1341 // Because EIS is "hard-coded" for certain use cases, and the current
1342 // implementation doesn't support shared preview and video on the same
1343 // stream, return failure if EIS is forced on.
1344 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1345 LOGE("Combined video and preview usage flag is not supported due to EIS");
1346 return -EINVAL;
1347 }
1348 }
1349 return NO_ERROR;
1350}
1351
Thierry Strudel3d639192016-09-09 11:52:26 -07001352/*==============================================================================
1353 * FUNCTION : isSupportChannelNeeded
1354 *
1355 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1356 *
1357 * PARAMETERS :
1358 * @stream_list : streams to be configured
1359 * @stream_config_info : the config info for streams to be configured
1360 *
1361 * RETURN : Boolean true/false decision
1362 *
1363 *==========================================================================*/
1364bool QCamera3HardwareInterface::isSupportChannelNeeded(
1365 camera3_stream_configuration_t *streamList,
1366 cam_stream_size_info_t stream_config_info)
1367{
1368 uint32_t i;
1369 bool pprocRequested = false;
1370 /* Check for conditions where PProc pipeline does not have any streams*/
1371 for (i = 0; i < stream_config_info.num_streams; i++) {
1372 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1373 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1374 pprocRequested = true;
1375 break;
1376 }
1377 }
1378
1379 if (pprocRequested == false )
1380 return true;
1381
1382     /* Dummy stream needed if only raw or jpeg streams are present */
1383 for (i = 0; i < streamList->num_streams; i++) {
1384 switch(streamList->streams[i]->format) {
1385 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1386 case HAL_PIXEL_FORMAT_RAW10:
1387 case HAL_PIXEL_FORMAT_RAW16:
1388 case HAL_PIXEL_FORMAT_BLOB:
1389 break;
1390 default:
1391 return false;
1392 }
1393 }
1394 return true;
1395}
1396
1397/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001398 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001399 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001400 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001401 *
1402 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001403 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001404 *
1405 * RETURN : int32_t type of status
1406 * NO_ERROR -- success
1407 * non-zero failure code
1408 *
1409 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001410int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001411{
1412 int32_t rc = NO_ERROR;
1413
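    // Find the largest width and height across all configured streams; this is
    // used below to set CAM_INTF_PARM_MAX_DIMENSION before querying the sensor mode.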
1414 cam_dimension_t max_dim = {0, 0};
1415 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1416 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1417 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1418 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1419 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1420 }
1421
1422 clear_metadata_buffer(mParameters);
1423
1424 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1425 max_dim);
1426 if (rc != NO_ERROR) {
1427 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1428 return rc;
1429 }
1430
1431 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1432 if (rc != NO_ERROR) {
1433 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1434 return rc;
1435 }
1436
1437 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001439
1440 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1441 mParameters);
1442 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 return rc;
1445 }
1446
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001447 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001448 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1449 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1450 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1451 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1452 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001453
1454 return rc;
1455}
1456
1457/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 * FUNCTION : addToPPFeatureMask
1459 *
1460 * DESCRIPTION: add additional features to pp feature mask based on
1461 * stream type and use case
1462 *
1463 * PARAMETERS :
1464 * @stream_format : stream type for feature mask
1465 * @stream_idx : stream idx within postprocess_mask list to change
1466 *
1467 * RETURN : None
1468 *
1469 *==========================================================================*/
1470void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1471 uint32_t stream_idx)
1472{
1473 char feature_mask_value[PROPERTY_VALUE_MAX];
1474 cam_feature_mask_t feature_mask;
1475 int args_converted;
1476 int property_len;
1477
1478 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001479#ifdef _LE_CAMERA_
1480 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1481 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1482 property_len = property_get("persist.camera.hal3.feature",
1483 feature_mask_value, swtnr_feature_mask_value);
1484#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001485 property_len = property_get("persist.camera.hal3.feature",
1486 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001487#endif
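    // The property value may be given either as hex ("0x...") or as decimal.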
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1489 (feature_mask_value[1] == 'x')) {
1490 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1491 } else {
1492 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1493 }
1494 if (1 != args_converted) {
1495 feature_mask = 0;
1496 LOGE("Wrong feature mask %s", feature_mask_value);
1497 return;
1498 }
1499
1500 switch (stream_format) {
1501 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1502 /* Add LLVD to pp feature mask only if video hint is enabled */
1503 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1504 mStreamConfigInfo.postprocess_mask[stream_idx]
1505 |= CAM_QTI_FEATURE_SW_TNR;
1506 LOGH("Added SW TNR to pp feature mask");
1507 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1508 mStreamConfigInfo.postprocess_mask[stream_idx]
1509 |= CAM_QCOM_FEATURE_LLVD;
1510 LOGH("Added LLVD SeeMore to pp feature mask");
1511 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001512 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1513 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1514 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1515 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001516 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1517 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1518 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1519 CAM_QTI_FEATURE_BINNING_CORRECTION;
1520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001521 break;
1522 }
1523 default:
1524 break;
1525 }
1526 LOGD("PP feature mask %llx",
1527 mStreamConfigInfo.postprocess_mask[stream_idx]);
1528}
1529
1530/*==============================================================================
1531 * FUNCTION : updateFpsInPreviewBuffer
1532 *
1533 * DESCRIPTION: update FPS information in preview buffer.
1534 *
1535 * PARAMETERS :
1536 * @metadata : pointer to metadata buffer
1537 * @frame_number: frame_number to look for in pending buffer list
1538 *
1539 * RETURN : None
1540 *
1541 *==========================================================================*/
1542void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1543 uint32_t frame_number)
1544{
1545 // Mark all pending buffers for this particular request
1546 // with corresponding framerate information
1547 for (List<PendingBuffersInRequest>::iterator req =
1548 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1549 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1550 for(List<PendingBufferInfo>::iterator j =
1551 req->mPendingBufferList.begin();
1552 j != req->mPendingBufferList.end(); j++) {
1553 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1554 if ((req->frame_number == frame_number) &&
1555 (channel->getStreamTypeMask() &
1556 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1557 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1558 CAM_INTF_PARM_FPS_RANGE, metadata) {
1559 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1560 struct private_handle_t *priv_handle =
1561 (struct private_handle_t *)(*(j->buffer));
1562 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1563 }
1564 }
1565 }
1566 }
1567}
1568
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001569/*==============================================================================
1570 * FUNCTION : updateTimeStampInPendingBuffers
1571 *
1572 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1573 * of a frame number
1574 *
1575 * PARAMETERS :
1576 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1577 * @timestamp : timestamp to be set
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1583 uint32_t frameNumber, nsecs_t timestamp)
1584{
1585 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1586 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1587 if (req->frame_number != frameNumber)
1588 continue;
1589
1590 for (auto k = req->mPendingBufferList.begin();
1591 k != req->mPendingBufferList.end(); k++ ) {
1592 struct private_handle_t *priv_handle =
1593 (struct private_handle_t *) (*(k->buffer));
1594 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1595 }
1596 }
1597 return;
1598}
1599
Thierry Strudel3d639192016-09-09 11:52:26 -07001600/*===========================================================================
1601 * FUNCTION : configureStreams
1602 *
1603 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1604 * and output streams.
1605 *
1606 * PARAMETERS :
1607 * @stream_list : streams to be configured
1608 *
1609 * RETURN :
1610 *
1611 *==========================================================================*/
1612int QCamera3HardwareInterface::configureStreams(
1613 camera3_stream_configuration_t *streamList)
1614{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001615 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 int rc = 0;
1617
1618 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001619 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001621 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001622
1623 return rc;
1624}
1625
1626/*===========================================================================
1627 * FUNCTION : configureStreamsPerfLocked
1628 *
1629 * DESCRIPTION: configureStreams while perfLock is held.
1630 *
1631 * PARAMETERS :
1632 * @stream_list : streams to be configured
1633 *
1634 * RETURN : int32_t type of status
1635 * NO_ERROR -- success
1636 * non-zero failure code
1637 *==========================================================================*/
1638int QCamera3HardwareInterface::configureStreamsPerfLocked(
1639 camera3_stream_configuration_t *streamList)
1640{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001641 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001642 int rc = 0;
1643
1644 // Sanity check stream_list
1645 if (streamList == NULL) {
1646 LOGE("NULL stream configuration");
1647 return BAD_VALUE;
1648 }
1649 if (streamList->streams == NULL) {
1650 LOGE("NULL stream list");
1651 return BAD_VALUE;
1652 }
1653
1654 if (streamList->num_streams < 1) {
1655 LOGE("Bad number of streams requested: %d",
1656 streamList->num_streams);
1657 return BAD_VALUE;
1658 }
1659
1660 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1661 LOGE("Maximum number of streams %d exceeded: %d",
1662 MAX_NUM_STREAMS, streamList->num_streams);
1663 return BAD_VALUE;
1664 }
1665
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001666 rc = validateUsageFlags(streamList);
1667 if (rc != NO_ERROR) {
1668 return rc;
1669 }
1670
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 mOpMode = streamList->operation_mode;
1672 LOGD("mOpMode: %d", mOpMode);
1673
1674     /* first invalidate all the streams in the mStreamList
1675 * if they appear again, they will be validated */
1676 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1677 it != mStreamInfo.end(); it++) {
1678 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1679 if (channel) {
1680 channel->stop();
1681 }
1682 (*it)->status = INVALID;
1683 }
1684
1685 if (mRawDumpChannel) {
1686 mRawDumpChannel->stop();
1687 delete mRawDumpChannel;
1688 mRawDumpChannel = NULL;
1689 }
1690
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001691 if (mHdrPlusRawSrcChannel) {
1692 mHdrPlusRawSrcChannel->stop();
1693 delete mHdrPlusRawSrcChannel;
1694 mHdrPlusRawSrcChannel = NULL;
1695 }
1696
Thierry Strudel3d639192016-09-09 11:52:26 -07001697 if (mSupportChannel)
1698 mSupportChannel->stop();
1699
1700 if (mAnalysisChannel) {
1701 mAnalysisChannel->stop();
1702 }
1703 if (mMetadataChannel) {
1704         /* If mStreamInfo is not empty, there is a metadata stream */
1705 mMetadataChannel->stop();
1706 }
1707 if (mChannelHandle) {
1708 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1709 mChannelHandle);
1710 LOGD("stopping channel %d", mChannelHandle);
1711 }
1712
1713 pthread_mutex_lock(&mMutex);
1714
1715 // Check state
1716 switch (mState) {
1717 case INITIALIZED:
1718 case CONFIGURED:
1719 case STARTED:
1720 /* valid state */
1721 break;
1722 default:
1723 LOGE("Invalid state %d", mState);
1724 pthread_mutex_unlock(&mMutex);
1725 return -ENODEV;
1726 }
1727
1728 /* Check whether we have video stream */
1729 m_bIs4KVideo = false;
1730 m_bIsVideo = false;
1731 m_bEisSupportedSize = false;
1732 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001733 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001734 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001735 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001736 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 uint32_t videoWidth = 0U;
1738 uint32_t videoHeight = 0U;
1739 size_t rawStreamCnt = 0;
1740 size_t stallStreamCnt = 0;
1741 size_t processedStreamCnt = 0;
1742 // Number of streams on ISP encoder path
1743 size_t numStreamsOnEncoder = 0;
1744 size_t numYuv888OnEncoder = 0;
1745 bool bYuv888OverrideJpeg = false;
1746 cam_dimension_t largeYuv888Size = {0, 0};
1747 cam_dimension_t maxViewfinderSize = {0, 0};
1748 bool bJpegExceeds4K = false;
1749 bool bJpegOnEncoder = false;
1750 bool bUseCommonFeatureMask = false;
1751 cam_feature_mask_t commonFeatureMask = 0;
1752 bool bSmallJpegSize = false;
1753 uint32_t width_ratio;
1754 uint32_t height_ratio;
1755 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1756 camera3_stream_t *inputStream = NULL;
1757 bool isJpeg = false;
1758 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001759 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001760 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001761
1762 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1763
1764 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001765 uint8_t eis_prop_set;
1766 uint32_t maxEisWidth = 0;
1767 uint32_t maxEisHeight = 0;
1768
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001769 // Initialize all instant AEC related variables
1770 mInstantAEC = false;
1771 mResetInstantAEC = false;
1772 mInstantAECSettledFrameNumber = 0;
1773 mAecSkipDisplayFrameBound = 0;
1774 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001775 mCurrFeatureState = 0;
1776 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001777
Thierry Strudel3d639192016-09-09 11:52:26 -07001778 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1779
1780 size_t count = IS_TYPE_MAX;
1781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1782 for (size_t i = 0; i < count; i++) {
1783 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001784 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1785 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001786 break;
1787 }
1788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001789
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001790 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001791 maxEisWidth = MAX_EIS_WIDTH;
1792 maxEisHeight = MAX_EIS_HEIGHT;
1793 }
1794
1795 /* EIS setprop control */
1796 char eis_prop[PROPERTY_VALUE_MAX];
1797 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001798 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001799 eis_prop_set = (uint8_t)atoi(eis_prop);
1800
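    // EIS is enabled only when the sensor supports it, the setprop requests it,
    // and the session is not in constrained high-speed (HFR) mode.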
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001801 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001802 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1803
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001804 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1805 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001806
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 /* stream configurations */
1808 for (size_t i = 0; i < streamList->num_streams; i++) {
1809 camera3_stream_t *newStream = streamList->streams[i];
1810 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1811 "height = %d, rotation = %d, usage = 0x%x",
1812 i, newStream->stream_type, newStream->format,
1813 newStream->width, newStream->height, newStream->rotation,
1814 newStream->usage);
1815 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1816 newStream->stream_type == CAMERA3_STREAM_INPUT){
1817 isZsl = true;
1818 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001819 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1820 IS_USAGE_PREVIEW(newStream->usage)) {
1821 isPreview = true;
1822 }
1823
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1825 inputStream = newStream;
1826 }
1827
Emilian Peev7650c122017-01-19 08:24:33 -08001828 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1829 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 isJpeg = true;
1831 jpegSize.width = newStream->width;
1832 jpegSize.height = newStream->height;
1833 if (newStream->width > VIDEO_4K_WIDTH ||
1834 newStream->height > VIDEO_4K_HEIGHT)
1835 bJpegExceeds4K = true;
1836 }
1837
1838 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1839 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1840 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001841 // In HAL3 we can have multiple different video streams.
1842 // The variables video width and height are used below as
1843 // dimensions of the biggest of them
1844 if (videoWidth < newStream->width ||
1845 videoHeight < newStream->height) {
1846 videoWidth = newStream->width;
1847 videoHeight = newStream->height;
1848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1850 (VIDEO_4K_HEIGHT <= newStream->height)) {
1851 m_bIs4KVideo = true;
1852 }
1853 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1854 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001855
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 }
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1859 switch (newStream->format) {
1860 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001861 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1862 depthPresent = true;
1863 break;
1864 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 stallStreamCnt++;
1866 if (isOnEncoder(maxViewfinderSize, newStream->width,
1867 newStream->height)) {
1868 numStreamsOnEncoder++;
1869 bJpegOnEncoder = true;
1870 }
1871 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1872 newStream->width);
1873 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1874                             newStream->height);
1875 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1876 "FATAL: max_downscale_factor cannot be zero and so assert");
1877 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1878 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1879 LOGH("Setting small jpeg size flag to true");
1880 bSmallJpegSize = true;
1881 }
1882 break;
1883 case HAL_PIXEL_FORMAT_RAW10:
1884 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1885 case HAL_PIXEL_FORMAT_RAW16:
1886 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1888 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1889 pdStatCount++;
1890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 break;
1892 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1893 processedStreamCnt++;
1894 if (isOnEncoder(maxViewfinderSize, newStream->width,
1895 newStream->height)) {
1896 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1897 !IS_USAGE_ZSL(newStream->usage)) {
1898 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900 numStreamsOnEncoder++;
1901 }
1902 break;
1903 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1904 processedStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 // If Yuv888 size is not greater than 4K, set feature mask
1908                     // to SUPERSET so that it supports concurrent requests on
1909 // YUV and JPEG.
1910 if (newStream->width <= VIDEO_4K_WIDTH &&
1911 newStream->height <= VIDEO_4K_HEIGHT) {
1912 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 }
1914 numStreamsOnEncoder++;
1915 numYuv888OnEncoder++;
1916 largeYuv888Size.width = newStream->width;
1917 largeYuv888Size.height = newStream->height;
1918 }
1919 break;
1920 default:
1921 processedStreamCnt++;
1922 if (isOnEncoder(maxViewfinderSize, newStream->width,
1923 newStream->height)) {
1924 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1925 numStreamsOnEncoder++;
1926 }
1927 break;
1928 }
1929
1930 }
1931 }
1932
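    // EIS is not applied for front cameras or when no video stream is configured.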
1933 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1934 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1935 !m_bIsVideo) {
1936 m_bEisEnable = false;
1937 }
1938
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001939 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1940 pthread_mutex_unlock(&mMutex);
1941 return -EINVAL;
1942 }
1943
Thierry Strudel54dc9782017-02-15 12:12:10 -08001944 uint8_t forceEnableTnr = 0;
1945 char tnr_prop[PROPERTY_VALUE_MAX];
1946 memset(tnr_prop, 0, sizeof(tnr_prop));
1947 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1948 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 /* Logic to enable/disable TNR based on specific config size/etc.*/
1951 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1952 ((videoWidth == 1920 && videoHeight == 1080) ||
1953 (videoWidth == 1280 && videoHeight == 720)) &&
1954 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1955 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001956 else if (forceEnableTnr)
1957 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001958
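    // Video HDR is controlled by the persist.camera.hdr.video property and is
    // honored only for video sessions outside constrained high-speed mode.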
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001959 char videoHdrProp[PROPERTY_VALUE_MAX];
1960 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1961 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1962 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1963
1964 if (hdr_mode_prop == 1 && m_bIsVideo &&
1965 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1966 m_bVideoHdrEnabled = true;
1967 else
1968 m_bVideoHdrEnabled = false;
1969
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 /* Check if num_streams is sane */
1972 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1973 rawStreamCnt > MAX_RAW_STREAMS ||
1974 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1975         LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
1976 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1977 pthread_mutex_unlock(&mMutex);
1978 return -EINVAL;
1979 }
1980 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001981 if (isZsl && m_bIs4KVideo) {
1982 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 pthread_mutex_unlock(&mMutex);
1984 return -EINVAL;
1985 }
1986 /* Check if stream sizes are sane */
1987 if (numStreamsOnEncoder > 2) {
1988 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1989 pthread_mutex_unlock(&mMutex);
1990 return -EINVAL;
1991 } else if (1 < numStreamsOnEncoder){
1992 bUseCommonFeatureMask = true;
1993 LOGH("Multiple streams above max viewfinder size, common mask needed");
1994 }
1995
1996 /* Check if BLOB size is greater than 4k in 4k recording case */
1997 if (m_bIs4KVideo && bJpegExceeds4K) {
1998 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Emilian Peev7650c122017-01-19 08:24:33 -08002003 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2004 depthPresent) {
2005 LOGE("HAL doesn't support depth streams in HFR mode!");
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2011 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2012 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2013 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2014 // configurations:
2015 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2016 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2017 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2018 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2019 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2020 __func__);
2021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024
2025 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2026 // the YUV stream's size is greater or equal to the JPEG size, set common
2027 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2028 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2029 jpegSize.width, jpegSize.height) &&
2030 largeYuv888Size.width > jpegSize.width &&
2031 largeYuv888Size.height > jpegSize.height) {
2032 bYuv888OverrideJpeg = true;
2033 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2034 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
2036
2037 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2038 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2039 commonFeatureMask);
2040 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2041 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2042
2043 rc = validateStreamDimensions(streamList);
2044 if (rc == NO_ERROR) {
2045 rc = validateStreamRotations(streamList);
2046 }
2047 if (rc != NO_ERROR) {
2048 LOGE("Invalid stream configuration requested!");
2049 pthread_mutex_unlock(&mMutex);
2050 return rc;
2051 }
2052
Emilian Peev0f3c3162017-03-15 12:57:46 +00002053 if (1 < pdStatCount) {
2054 LOGE("HAL doesn't support multiple PD streams");
2055 pthread_mutex_unlock(&mMutex);
2056 return -EINVAL;
2057 }
2058
2059 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2060 (1 == pdStatCount)) {
2061 LOGE("HAL doesn't support PD streams in HFR mode!");
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
Thierry Strudel3d639192016-09-09 11:52:26 -07002066 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2067 for (size_t i = 0; i < streamList->num_streams; i++) {
2068 camera3_stream_t *newStream = streamList->streams[i];
2069 LOGH("newStream type = %d, stream format = %d "
2070 "stream size : %d x %d, stream rotation = %d",
2071 newStream->stream_type, newStream->format,
2072 newStream->width, newStream->height, newStream->rotation);
2073 //if the stream is in the mStreamList validate it
2074 bool stream_exists = false;
2075 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2076 it != mStreamInfo.end(); it++) {
2077 if ((*it)->stream == newStream) {
2078 QCamera3ProcessingChannel *channel =
2079 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2080 stream_exists = true;
2081 if (channel)
2082 delete channel;
2083 (*it)->status = VALID;
2084 (*it)->stream->priv = NULL;
2085 (*it)->channel = NULL;
2086 }
2087 }
2088 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2089 //new stream
2090 stream_info_t* stream_info;
2091 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2092 if (!stream_info) {
2093 LOGE("Could not allocate stream info");
2094 rc = -ENOMEM;
2095 pthread_mutex_unlock(&mMutex);
2096 return rc;
2097 }
2098 stream_info->stream = newStream;
2099 stream_info->status = VALID;
2100 stream_info->channel = NULL;
2101 mStreamInfo.push_back(stream_info);
2102 }
2103 /* Covers Opaque ZSL and API1 F/W ZSL */
2104 if (IS_USAGE_ZSL(newStream->usage)
2105 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2106 if (zslStream != NULL) {
2107 LOGE("Multiple input/reprocess streams requested!");
2108 pthread_mutex_unlock(&mMutex);
2109 return BAD_VALUE;
2110 }
2111 zslStream = newStream;
2112 }
2113 /* Covers YUV reprocess */
2114 if (inputStream != NULL) {
2115 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2116 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2117 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2118 && inputStream->width == newStream->width
2119 && inputStream->height == newStream->height) {
2120 if (zslStream != NULL) {
2121                 /* This scenario indicates that multiple YUV streams with the same
2122                  * size as the input stream have been requested. Since the zsl stream
2123                  * handle is used solely for overriding the size of streams that share
2124                  * h/w streams, we just make a guess here as to which of the streams
2125                  * is the ZSL stream. This will be refactored once we add generic
2126                  * logic for streams sharing encoder output.
2127 */
2128 LOGH("Warning, Multiple ip/reprocess streams requested!");
2129 }
2130 zslStream = newStream;
2131 }
2132 }
2133 }
2134
2135 /* If a zsl stream is set, we know that we have configured at least one input or
2136 bidirectional stream */
2137 if (NULL != zslStream) {
2138 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2139 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2140 mInputStreamInfo.format = zslStream->format;
2141 mInputStreamInfo.usage = zslStream->usage;
2142 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2143 mInputStreamInfo.dim.width,
2144 mInputStreamInfo.dim.height,
2145 mInputStreamInfo.format, mInputStreamInfo.usage);
2146 }
2147
2148 cleanAndSortStreamInfo();
2149 if (mMetadataChannel) {
2150 delete mMetadataChannel;
2151 mMetadataChannel = NULL;
2152 }
2153 if (mSupportChannel) {
2154 delete mSupportChannel;
2155 mSupportChannel = NULL;
2156 }
2157
2158 if (mAnalysisChannel) {
2159 delete mAnalysisChannel;
2160 mAnalysisChannel = NULL;
2161 }
2162
2163 if (mDummyBatchChannel) {
2164 delete mDummyBatchChannel;
2165 mDummyBatchChannel = NULL;
2166 }
2167
Emilian Peev7650c122017-01-19 08:24:33 -08002168 if (mDepthChannel) {
2169 mDepthChannel = NULL;
2170 }
2171
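    // Check whether EIS 3.0 is selected via the persist.camera.is_type property;
    // when it is, video streams get the PPEISCORE feature and extra buffers below.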
Thierry Strudel2896d122017-02-23 19:18:03 -08002172 char is_type_value[PROPERTY_VALUE_MAX];
2173 property_get("persist.camera.is_type", is_type_value, "4");
2174 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2175
Thierry Strudel3d639192016-09-09 11:52:26 -07002176 //Create metadata channel and initialize it
2177 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2178 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2179 gCamCapability[mCameraId]->color_arrangement);
2180 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2181 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002182 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 if (mMetadataChannel == NULL) {
2184 LOGE("failed to allocate metadata channel");
2185 rc = -ENOMEM;
2186 pthread_mutex_unlock(&mMutex);
2187 return rc;
2188 }
2189 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2190 if (rc < 0) {
2191 LOGE("metadata channel initialization failed");
2192 delete mMetadataChannel;
2193 mMetadataChannel = NULL;
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Thierry Strudel2896d122017-02-23 19:18:03 -08002198 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002199 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002200 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002201 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2202 /* Allocate channel objects for the requested streams */
2203 for (size_t i = 0; i < streamList->num_streams; i++) {
2204 camera3_stream_t *newStream = streamList->streams[i];
2205 uint32_t stream_usage = newStream->usage;
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2207 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2208 struct camera_info *p_info = NULL;
2209 pthread_mutex_lock(&gCamLock);
2210 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2211 pthread_mutex_unlock(&gCamLock);
2212 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2213 || IS_USAGE_ZSL(newStream->usage)) &&
2214 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002215 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002216 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002217 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2218 if (bUseCommonFeatureMask)
2219 zsl_ppmask = commonFeatureMask;
2220 else
2221 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002222 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002223 if (numStreamsOnEncoder > 0)
2224 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225 else
2226 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002228 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002230 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002231 LOGH("Input stream configured, reprocess config");
2232 } else {
2233 //for non zsl streams find out the format
2234 switch (newStream->format) {
2235 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2236 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002237 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002238 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2239 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2240 /* add additional features to pp feature mask */
2241 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2242 mStreamConfigInfo.num_streams);
2243
2244 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2246 CAM_STREAM_TYPE_VIDEO;
2247 if (m_bTnrEnabled && m_bTnrVideo) {
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2249 CAM_QCOM_FEATURE_CPP_TNR;
2250 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2251 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2252 ~CAM_QCOM_FEATURE_CDS;
2253 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002254 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2255 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2256 CAM_QTI_FEATURE_PPEISCORE;
2257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 } else {
2259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2260 CAM_STREAM_TYPE_PREVIEW;
2261 if (m_bTnrEnabled && m_bTnrPreview) {
2262 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2263 CAM_QCOM_FEATURE_CPP_TNR;
2264 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2265 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2266 ~CAM_QCOM_FEATURE_CDS;
2267 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002268 if(!m_bSwTnrPreview) {
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2270 ~CAM_QTI_FEATURE_SW_TNR;
2271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 padding_info.width_padding = mSurfaceStridePadding;
2273 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002274 previewSize.width = (int32_t)newStream->width;
2275 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 }
2277 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2278 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2279 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2280 newStream->height;
2281 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2282 newStream->width;
2283 }
2284 }
2285 break;
2286 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002287 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2289 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2290 if (bUseCommonFeatureMask)
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2292 commonFeatureMask;
2293 else
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2295 CAM_QCOM_FEATURE_NONE;
2296 } else {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2298 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2299 }
2300 break;
2301 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002302 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2304 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2305 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2307 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 /* Remove rotation if it is not supported
2309 for 4K LiveVideo snapshot case (online processing) */
2310 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2311 CAM_QCOM_FEATURE_ROTATION)) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2313 &= ~CAM_QCOM_FEATURE_ROTATION;
2314 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 } else {
2316 if (bUseCommonFeatureMask &&
2317 isOnEncoder(maxViewfinderSize, newStream->width,
2318 newStream->height)) {
2319 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2320 } else {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2322 }
2323 }
2324 if (isZsl) {
2325 if (zslStream) {
2326 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2327 (int32_t)zslStream->width;
2328 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2329 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 } else {
2333 LOGE("Error, No ZSL stream identified");
2334 pthread_mutex_unlock(&mMutex);
2335 return -EINVAL;
2336 }
2337 } else if (m_bIs4KVideo) {
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2339 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2340 } else if (bYuv888OverrideJpeg) {
2341 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2342 (int32_t)largeYuv888Size.width;
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2344 (int32_t)largeYuv888Size.height;
2345 }
2346 break;
2347 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2348 case HAL_PIXEL_FORMAT_RAW16:
2349 case HAL_PIXEL_FORMAT_RAW10:
2350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2351 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2352 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002353 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2354 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2355 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2356 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2357 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2358 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2359 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2360 gCamCapability[mCameraId]->dt[mPDIndex];
2361 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2362 gCamCapability[mCameraId]->vc[mPDIndex];
2363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 break;
2365 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2369 break;
2370 }
2371 }
2372
2373 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2374 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2375 gCamCapability[mCameraId]->color_arrangement);
2376
2377 if (newStream->priv == NULL) {
2378 //New stream, construct channel
2379 switch (newStream->stream_type) {
2380 case CAMERA3_STREAM_INPUT:
2381 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2382 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2383 break;
2384 case CAMERA3_STREAM_BIDIRECTIONAL:
2385 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2386 GRALLOC_USAGE_HW_CAMERA_WRITE;
2387 break;
2388 case CAMERA3_STREAM_OUTPUT:
2389 /* For video encoding stream, set read/write rarely
2390 * flag so that they may be set to un-cached */
2391 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2392 newStream->usage |=
2393 (GRALLOC_USAGE_SW_READ_RARELY |
2394 GRALLOC_USAGE_SW_WRITE_RARELY |
2395 GRALLOC_USAGE_HW_CAMERA_WRITE);
2396 else if (IS_USAGE_ZSL(newStream->usage))
2397 {
2398 LOGD("ZSL usage flag skipping");
2399 }
2400 else if (newStream == zslStream
2401 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2402 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2403 } else
2404 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2405 break;
2406 default:
2407 LOGE("Invalid stream_type %d", newStream->stream_type);
2408 break;
2409 }
2410
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002411 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002412 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2413 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2414 QCamera3ProcessingChannel *channel = NULL;
2415 switch (newStream->format) {
2416 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2417 if ((newStream->usage &
2418 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2419 (streamList->operation_mode ==
2420 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2421 ) {
2422 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2423 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002424 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002425 this,
2426 newStream,
2427 (cam_stream_type_t)
2428 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2430 mMetadataChannel,
2431 0); //heap buffers are not required for HFR video channel
2432 if (channel == NULL) {
2433 LOGE("allocation of channel failed");
2434 pthread_mutex_unlock(&mMutex);
2435 return -ENOMEM;
2436 }
2437 //channel->getNumBuffers() will return 0 here so use
2438                         //MAX_INFLIGHT_HFR_REQUESTS
2439 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2440 newStream->priv = channel;
2441 LOGI("num video buffers in HFR mode: %d",
2442 MAX_INFLIGHT_HFR_REQUESTS);
2443 } else {
2444 /* Copy stream contents in HFR preview only case to create
2445 * dummy batch channel so that sensor streaming is in
2446 * HFR mode */
2447 if (!m_bIsVideo && (streamList->operation_mode ==
2448 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2449 mDummyBatchStream = *newStream;
2450 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002451 int bufferCount = MAX_INFLIGHT_REQUESTS;
2452 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2453 CAM_STREAM_TYPE_VIDEO) {
2454 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2455 bufferCount = MAX_VIDEO_BUFFERS;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2458 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002459 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002460 this,
2461 newStream,
2462 (cam_stream_type_t)
2463 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2465 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002467 if (channel == NULL) {
2468 LOGE("allocation of channel failed");
2469 pthread_mutex_unlock(&mMutex);
2470 return -ENOMEM;
2471 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 /* disable UBWC for preview, though supported,
2473 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002474 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002475 (previewSize.width == (int32_t)videoWidth)&&
2476 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002477 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002479 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 newStream->max_buffers = channel->getNumBuffers();
2481 newStream->priv = channel;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2485 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2486 mChannelHandle,
2487 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel);
2495 if (channel == NULL) {
2496 LOGE("allocation of YUV channel failed");
2497 pthread_mutex_unlock(&mMutex);
2498 return -ENOMEM;
2499 }
2500 newStream->max_buffers = channel->getNumBuffers();
2501 newStream->priv = channel;
2502 break;
2503 }
2504 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2505 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002506 case HAL_PIXEL_FORMAT_RAW10: {
2507 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2508 (HAL_DATASPACE_DEPTH != newStream->data_space))
2509 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mRawChannel = new QCamera3RawChannel(
2511 mCameraHandle->camera_handle, mChannelHandle,
2512 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002513 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002514 this, newStream,
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002516 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002517 if (mRawChannel == NULL) {
2518 LOGE("allocation of raw channel failed");
2519 pthread_mutex_unlock(&mMutex);
2520 return -ENOMEM;
2521 }
2522 newStream->max_buffers = mRawChannel->getNumBuffers();
2523 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2524 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002525 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 case HAL_PIXEL_FORMAT_BLOB:
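                // A BLOB stream tagged with HAL_DATASPACE_DEPTH carries depth data and
                // gets its own depth channel; all other BLOB streams are JPEG snapshots.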
Emilian Peev7650c122017-01-19 08:24:33 -08002527 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2528 mDepthChannel = new QCamera3DepthChannel(
2529 mCameraHandle->camera_handle, mChannelHandle,
2530 mCameraHandle->ops, NULL, NULL, &padding_info,
2531 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2532 mMetadataChannel);
2533 if (NULL == mDepthChannel) {
2534 LOGE("Allocation of depth channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return NO_MEMORY;
2537 }
2538 newStream->priv = mDepthChannel;
2539 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2540 } else {
2541 // Max live snapshot inflight buffer is 1. This is to mitigate
2542 // frame drop issues for video snapshot. The more buffers being
2543 // allocated, the more frame drops there are.
2544 mPictureChannel = new QCamera3PicChannel(
2545 mCameraHandle->camera_handle, mChannelHandle,
2546 mCameraHandle->ops, captureResultCb,
2547 setBufferErrorStatus, &padding_info, this, newStream,
2548 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2549 m_bIs4KVideo, isZsl, mMetadataChannel,
2550 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2551 if (mPictureChannel == NULL) {
2552 LOGE("allocation of channel failed");
2553 pthread_mutex_unlock(&mMutex);
2554 return -ENOMEM;
2555 }
2556 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2557 newStream->max_buffers = mPictureChannel->getNumBuffers();
2558 mPictureChannel->overrideYuvSize(
2559 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2560 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002562 break;
2563
2564 default:
2565 LOGE("not a supported format 0x%x", newStream->format);
2566 break;
2567 }
2568 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2569 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2570 } else {
2571 LOGE("Error, Unknown stream type");
2572 pthread_mutex_unlock(&mMutex);
2573 return -EINVAL;
2574 }
2575
2576 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002577 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2578 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002579 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002580 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2582 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2583 }
2584 }
2585
2586 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2587 it != mStreamInfo.end(); it++) {
2588 if ((*it)->stream == newStream) {
2589 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2590 break;
2591 }
2592 }
2593 } else {
2594 // Channel already exists for this stream
2595 // Do nothing for now
2596 }
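        // Restore the default padding info; the preview path above may have
        // overridden it for this stream.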
2597 padding_info = gCamCapability[mCameraId]->padding_info;
2598
Emilian Peev7650c122017-01-19 08:24:33 -08002599         /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002600          * since there is no real stream associated with them
2601 */
Emilian Peev7650c122017-01-19 08:24:33 -08002602 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002603 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2604 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 }
2608
Thierry Strudel2896d122017-02-23 19:18:03 -08002609 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2610 onlyRaw = false;
2611 }
2612
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002613 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002615 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002616 cam_analysis_info_t analysisInfo;
2617 int32_t ret = NO_ERROR;
2618 ret = mCommon.getAnalysisInfo(
2619 FALSE,
2620 analysisFeatureMask,
2621 &analysisInfo);
2622 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002623 cam_color_filter_arrangement_t analysis_color_arrangement =
2624 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2625 CAM_FILTER_ARRANGEMENT_Y :
2626 gCamCapability[mCameraId]->color_arrangement);
2627 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2628 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002629 cam_dimension_t analysisDim;
2630 analysisDim = mCommon.getMatchingDimension(previewSize,
2631 analysisInfo.analysis_recommended_res);
2632
2633 mAnalysisChannel = new QCamera3SupportChannel(
2634 mCameraHandle->camera_handle,
2635 mChannelHandle,
2636 mCameraHandle->ops,
2637 &analysisInfo.analysis_padding_info,
2638 analysisFeatureMask,
2639 CAM_STREAM_TYPE_ANALYSIS,
2640 &analysisDim,
2641 (analysisInfo.analysis_format
2642 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2643 : CAM_FORMAT_YUV_420_NV21),
2644 analysisInfo.hw_analysis_supported,
2645 gCamCapability[mCameraId]->color_arrangement,
2646 this,
2647 0); // force buffer count to 0
2648 } else {
2649 LOGW("getAnalysisInfo failed, ret = %d", ret);
2650 }
2651 if (!mAnalysisChannel) {
2652 LOGW("Analysis channel cannot be created");
2653 }
2654 }
2655
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 //RAW DUMP channel
2657 if (mEnableRawDump && isRawStreamRequested == false){
2658 cam_dimension_t rawDumpSize;
2659 rawDumpSize = getMaxRawSize(mCameraId);
2660 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2661 setPAAFSupport(rawDumpFeatureMask,
2662 CAM_STREAM_TYPE_RAW,
2663 gCamCapability[mCameraId]->color_arrangement);
2664 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2665 mChannelHandle,
2666 mCameraHandle->ops,
2667 rawDumpSize,
2668 &padding_info,
2669 this, rawDumpFeatureMask);
2670 if (!mRawDumpChannel) {
2671 LOGE("Raw Dump channel cannot be created");
2672 pthread_mutex_unlock(&mMutex);
2673 return -ENOMEM;
2674 }
2675 }
2676
Chien-Yu Chenee335912017-02-09 17:53:20 -08002677 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002678 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002679 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002680 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2681 "HDR+ RAW source channel is not created.",
2682 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002683 } else {
2684 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2685 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2686 setPAAFSupport(hdrPlusRawFeatureMask,
2687 CAM_STREAM_TYPE_RAW,
2688 gCamCapability[mCameraId]->color_arrangement);
2689 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2690 mChannelHandle,
2691 mCameraHandle->ops,
2692 rawSize,
2693 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002694 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002695 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002696 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002697 if (!mHdrPlusRawSrcChannel) {
2698 LOGE("HDR+ Raw Source channel cannot be created");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 }
2703 }
2704
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 if (mAnalysisChannel) {
2706 cam_analysis_info_t analysisInfo;
2707 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2708 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2709 CAM_STREAM_TYPE_ANALYSIS;
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2711 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002712 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2714 &analysisInfo);
2715 if (rc != NO_ERROR) {
2716 LOGE("getAnalysisInfo failed, ret = %d", rc);
2717 pthread_mutex_unlock(&mMutex);
2718 return rc;
2719 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002720 cam_color_filter_arrangement_t analysis_color_arrangement =
2721 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2722 CAM_FILTER_ARRANGEMENT_Y :
2723 gCamCapability[mCameraId]->color_arrangement);
2724 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2725 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2726 analysis_color_arrangement);
2727
Thierry Strudel3d639192016-09-09 11:52:26 -07002728 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002729 mCommon.getMatchingDimension(previewSize,
2730 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 mStreamConfigInfo.num_streams++;
2732 }
2733
Thierry Strudel2896d122017-02-23 19:18:03 -08002734 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 cam_analysis_info_t supportInfo;
2736 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2737 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2738 setPAAFSupport(callbackFeatureMask,
2739 CAM_STREAM_TYPE_CALLBACK,
2740 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002741 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002742 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002743 if (ret != NO_ERROR) {
2744 /* Ignore the error for Mono camera
2745 * because the PAAF bit mask is only set
2746 * for CAM_STREAM_TYPE_ANALYSIS stream type
2747 */
2748 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2749 LOGW("getAnalysisInfo failed, ret = %d", ret);
2750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 }
2752 mSupportChannel = new QCamera3SupportChannel(
2753 mCameraHandle->camera_handle,
2754 mChannelHandle,
2755 mCameraHandle->ops,
2756 &gCamCapability[mCameraId]->padding_info,
2757 callbackFeatureMask,
2758 CAM_STREAM_TYPE_CALLBACK,
2759 &QCamera3SupportChannel::kDim,
2760 CAM_FORMAT_YUV_420_NV21,
2761 supportInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002763 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 if (!mSupportChannel) {
2765 LOGE("dummy channel cannot be created");
2766 pthread_mutex_unlock(&mMutex);
2767 return -ENOMEM;
2768 }
2769 }
2770
2771 if (mSupportChannel) {
2772 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2773 QCamera3SupportChannel::kDim;
2774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2775 CAM_STREAM_TYPE_CALLBACK;
2776 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2777 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2778 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2779 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mStreamConfigInfo.num_streams++;
2782 }
2783
2784 if (mRawDumpChannel) {
2785 cam_dimension_t rawSize;
2786 rawSize = getMaxRawSize(mCameraId);
2787 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2788 rawSize;
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2790 CAM_STREAM_TYPE_RAW;
2791 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2792 CAM_QCOM_FEATURE_NONE;
2793 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2794 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2795 gCamCapability[mCameraId]->color_arrangement);
2796 mStreamConfigInfo.num_streams++;
2797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002798
2799 if (mHdrPlusRawSrcChannel) {
2800 cam_dimension_t rawSize;
2801 rawSize = getMaxRawSize(mCameraId);
2802 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2803 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2804 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2805 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2806 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2807 gCamCapability[mCameraId]->color_arrangement);
2808 mStreamConfigInfo.num_streams++;
2809 }
2810
Thierry Strudel3d639192016-09-09 11:52:26 -07002811 /* In HFR mode, if video stream is not added, create a dummy channel so that
2812 * ISP can create a batch mode even for preview only case. This channel is
2813 * never 'start'ed (no stream-on), it is only 'initialized' */
2814 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2815 !m_bIsVideo) {
2816 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2817 setPAAFSupport(dummyFeatureMask,
2818 CAM_STREAM_TYPE_VIDEO,
2819 gCamCapability[mCameraId]->color_arrangement);
2820 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2821 mChannelHandle,
2822 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002823 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 this,
2825 &mDummyBatchStream,
2826 CAM_STREAM_TYPE_VIDEO,
2827 dummyFeatureMask,
2828 mMetadataChannel);
2829 if (NULL == mDummyBatchChannel) {
2830 LOGE("creation of mDummyBatchChannel failed."
2831                        " Preview will use non-hfr sensor mode");
2832 }
2833 }
2834 if (mDummyBatchChannel) {
2835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2836 mDummyBatchStream.width;
2837 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2838 mDummyBatchStream.height;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2840 CAM_STREAM_TYPE_VIDEO;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2842 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2843 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2844 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2845 gCamCapability[mCameraId]->color_arrangement);
2846 mStreamConfigInfo.num_streams++;
2847 }
2848
2849 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2850 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002851 m_bIs4KVideo ? 0 :
2852 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002853
2854 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2855 for (pendingRequestIterator i = mPendingRequestsList.begin();
2856 i != mPendingRequestsList.end();) {
2857 i = erasePendingRequest(i);
2858 }
2859 mPendingFrameDropList.clear();
2860 // Initialize/Reset the pending buffers list
2861 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2862 req.mPendingBufferList.clear();
2863 }
2864 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 mCurJpegMeta.clear();
2867 //Get min frame duration for this streams configuration
2868 deriveMinFrameDuration();
2869
Chien-Yu Chenee335912017-02-09 17:53:20 -08002870 mFirstPreviewIntentSeen = false;
2871
2872    // Disable HDR+ if it's enabled
2873 disableHdrPlusModeLocked();
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 // Update state
2876 mState = CONFIGURED;
2877
2878 pthread_mutex_unlock(&mMutex);
2879
2880 return rc;
2881}
2882
2883/*===========================================================================
2884 * FUNCTION : validateCaptureRequest
2885 *
2886 * DESCRIPTION: validate a capture request from camera service
2887 *
2888 * PARAMETERS :
2889 * @request : request from framework to process
2890 *
2891 * RETURN :
2892 *
2893 *==========================================================================*/
2894int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002895 camera3_capture_request_t *request,
2896 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002897{
2898 ssize_t idx = 0;
2899 const camera3_stream_buffer_t *b;
2900 CameraMetadata meta;
2901
2902 /* Sanity check the request */
2903 if (request == NULL) {
2904 LOGE("NULL capture request");
2905 return BAD_VALUE;
2906 }
2907
2908 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2909 /*settings cannot be null for the first request*/
2910 return BAD_VALUE;
2911 }
2912
2913 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002914 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2915 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002916        LOGE("Request %d: No output buffers provided!",
2917                frameNumber);
2918 return BAD_VALUE;
2919 }
2920 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2921        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2922 request->num_output_buffers, MAX_NUM_STREAMS);
2923 return BAD_VALUE;
2924 }
2925 if (request->input_buffer != NULL) {
2926 b = request->input_buffer;
2927 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2928 LOGE("Request %d: Buffer %ld: Status not OK!",
2929 frameNumber, (long)idx);
2930 return BAD_VALUE;
2931 }
2932 if (b->release_fence != -1) {
2933 LOGE("Request %d: Buffer %ld: Has a release fence!",
2934 frameNumber, (long)idx);
2935 return BAD_VALUE;
2936 }
2937 if (b->buffer == NULL) {
2938 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2939 frameNumber, (long)idx);
2940 return BAD_VALUE;
2941 }
2942 }
2943
2944 // Validate all buffers
2945 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002946 if (b == NULL) {
2947 return BAD_VALUE;
2948 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002949 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002950 QCamera3ProcessingChannel *channel =
2951 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2952 if (channel == NULL) {
2953 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2954 frameNumber, (long)idx);
2955 return BAD_VALUE;
2956 }
2957 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2958 LOGE("Request %d: Buffer %ld: Status not OK!",
2959 frameNumber, (long)idx);
2960 return BAD_VALUE;
2961 }
2962 if (b->release_fence != -1) {
2963 LOGE("Request %d: Buffer %ld: Has a release fence!",
2964 frameNumber, (long)idx);
2965 return BAD_VALUE;
2966 }
2967 if (b->buffer == NULL) {
2968 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2969 frameNumber, (long)idx);
2970 return BAD_VALUE;
2971 }
2972 if (*(b->buffer) == NULL) {
2973 LOGE("Request %d: Buffer %ld: NULL private handle!",
2974 frameNumber, (long)idx);
2975 return BAD_VALUE;
2976 }
2977 idx++;
2978 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002980 return NO_ERROR;
2981}
2982
2983/*===========================================================================
2984 * FUNCTION : deriveMinFrameDuration
2985 *
2986 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2987 * on currently configured streams.
2988 *
2989 * PARAMETERS : NONE
2990 *
2991 * RETURN : NONE
2992 *
2993 *==========================================================================*/
2994void QCamera3HardwareInterface::deriveMinFrameDuration()
2995{
2996 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2997
2998 maxJpegDim = 0;
2999 maxProcessedDim = 0;
3000 maxRawDim = 0;
3001
3002 // Figure out maximum jpeg, processed, and raw dimensions
3003 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3004 it != mStreamInfo.end(); it++) {
3005
3006 // Input stream doesn't have valid stream_type
3007 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3008 continue;
3009
3010 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3011 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3012 if (dimension > maxJpegDim)
3013 maxJpegDim = dimension;
3014 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3015 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3016 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3017 if (dimension > maxRawDim)
3018 maxRawDim = dimension;
3019 } else {
3020 if (dimension > maxProcessedDim)
3021 maxProcessedDim = dimension;
3022 }
3023 }
3024
3025 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3026 MAX_SIZES_CNT);
3027
3028 //Assume all jpeg dimensions are in processed dimensions.
3029 if (maxJpegDim > maxProcessedDim)
3030 maxProcessedDim = maxJpegDim;
3031    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3032 if (maxProcessedDim > maxRawDim) {
3033 maxRawDim = INT32_MAX;
3034
3035 for (size_t i = 0; i < count; i++) {
3036 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3037 gCamCapability[mCameraId]->raw_dim[i].height;
3038 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3039 maxRawDim = dimension;
3040 }
3041 }
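    // Illustration (hypothetical sizes): with a 12 MP processed/JPEG stream and raw modes
    // of 10 MP and 16 MP, the search above picks the 16 MP entry, i.e. the smallest raw
    // dimension that still covers the processed output.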
3042
3043 //Find minimum durations for processed, jpeg, and raw
3044 for (size_t i = 0; i < count; i++) {
3045 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3046 gCamCapability[mCameraId]->raw_dim[i].height) {
3047 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3048 break;
3049 }
3050 }
3051 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3052 for (size_t i = 0; i < count; i++) {
3053 if (maxProcessedDim ==
3054 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3055 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3056 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3057 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3058 break;
3059 }
3060 }
3061}
3062
3063/*===========================================================================
3064 * FUNCTION : getMinFrameDuration
3065 *
3066 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
3067 * and current request configuration.
3068 *
3069 * PARAMETERS : @request: request sent by the framework
3070 *
3071 * RETURN : min frame duration for a particular request
3072 *
3073 *==========================================================================*/
3074int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3075{
3076 bool hasJpegStream = false;
3077 bool hasRawStream = false;
3078 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3079 const camera3_stream_t *stream = request->output_buffers[i].stream;
3080 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3081 hasJpegStream = true;
3082 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3083 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3084 stream->format == HAL_PIXEL_FORMAT_RAW16)
3085 hasRawStream = true;
3086 }
3087
3088 if (!hasJpegStream)
3089 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3090 else
3091 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3092}
3093
3094/*===========================================================================
3095 * FUNCTION : handleBuffersDuringFlushLock
3096 *
3097 * DESCRIPTION: Account for buffers returned from back-end during flush
3098 * This function is executed while mMutex is held by the caller.
3099 *
3100 * PARAMETERS :
3101 * @buffer: image buffer for the callback
3102 *
3103 * RETURN :
3104 *==========================================================================*/
3105void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3106{
3107 bool buffer_found = false;
3108 for (List<PendingBuffersInRequest>::iterator req =
3109 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3110 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3111 for (List<PendingBufferInfo>::iterator i =
3112 req->mPendingBufferList.begin();
3113 i != req->mPendingBufferList.end(); i++) {
3114 if (i->buffer == buffer->buffer) {
3115 mPendingBuffersMap.numPendingBufsAtFlush--;
3116 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3117 buffer->buffer, req->frame_number,
3118 mPendingBuffersMap.numPendingBufsAtFlush);
3119 buffer_found = true;
3120 break;
3121 }
3122 }
3123 if (buffer_found) {
3124 break;
3125 }
3126 }
3127 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3128 //signal the flush()
3129 LOGD("All buffers returned to HAL. Continue flush");
3130 pthread_cond_signal(&mBuffersCond);
3131 }
3132}
3133
Thierry Strudel3d639192016-09-09 11:52:26 -07003134/*===========================================================================
3135 * FUNCTION : handleBatchMetadata
3136 *
3137 * DESCRIPTION: Handles metadata buffer callback in batch mode
3138 *
3139 * PARAMETERS : @metadata_buf: metadata buffer
3140 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3141 * the meta buf in this method
3142 *
3143 * RETURN :
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::handleBatchMetadata(
3147 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3148{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003149 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003150
3151 if (NULL == metadata_buf) {
3152 LOGE("metadata_buf is NULL");
3153 return;
3154 }
3155    /* In batch mode, the metadata will contain the frame number and timestamp of
3156 * the last frame in the batch. Eg: a batch containing buffers from request
3157 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3158     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3159 * multiple process_capture_results */
3160 metadata_buffer_t *metadata =
3161 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3162 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3163 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3164 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3165 uint32_t frame_number = 0, urgent_frame_number = 0;
3166 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3167 bool invalid_metadata = false;
3168 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3169 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003170 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003171
3172 int32_t *p_frame_number_valid =
3173 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3174 uint32_t *p_frame_number =
3175 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3176 int64_t *p_capture_time =
3177 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3178 int32_t *p_urgent_frame_number_valid =
3179 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3180 uint32_t *p_urgent_frame_number =
3181 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3182
3183 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3184 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3185 (NULL == p_urgent_frame_number)) {
3186 LOGE("Invalid metadata");
3187 invalid_metadata = true;
3188 } else {
3189 frame_number_valid = *p_frame_number_valid;
3190 last_frame_number = *p_frame_number;
3191 last_frame_capture_time = *p_capture_time;
3192 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3193 last_urgent_frame_number = *p_urgent_frame_number;
3194 }
3195
3196    /* In batch mode, when no video buffers are requested, set_parms are sent
3197 * for every capture_request. The difference between consecutive urgent
3198 * frame numbers and frame numbers should be used to interpolate the
3199 * corresponding frame numbers and time stamps */
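    /* Illustration (hypothetical numbers): if this batch covers requests 100..107, the
     * metadata carries last_frame_number = 107 while mPendingBatchMap yields
     * first_frame_number = 100, so frameNumDiff = 8 and the loop below replays the
     * metadata 8 times with inferred frame numbers 100..107. */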
3200 pthread_mutex_lock(&mMutex);
3201 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003202 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3203 if(idx < 0) {
3204 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3205 last_urgent_frame_number);
3206 mState = ERROR;
3207 pthread_mutex_unlock(&mMutex);
3208 return;
3209 }
3210 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003211 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3212 first_urgent_frame_number;
3213
3214 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3215 urgent_frame_number_valid,
3216 first_urgent_frame_number, last_urgent_frame_number);
3217 }
3218
3219 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003220 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3221 if(idx < 0) {
3222 LOGE("Invalid frame number received: %d. Irrecoverable error",
3223 last_frame_number);
3224 mState = ERROR;
3225 pthread_mutex_unlock(&mMutex);
3226 return;
3227 }
3228 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003229 frameNumDiff = last_frame_number + 1 -
3230 first_frame_number;
3231 mPendingBatchMap.removeItem(last_frame_number);
3232
3233 LOGD("frm: valid: %d frm_num: %d - %d",
3234 frame_number_valid,
3235 first_frame_number, last_frame_number);
3236
3237 }
3238 pthread_mutex_unlock(&mMutex);
3239
3240 if (urgent_frame_number_valid || frame_number_valid) {
3241 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3242 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3243 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3244 urgentFrameNumDiff, last_urgent_frame_number);
3245 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3246 LOGE("frameNumDiff: %d frameNum: %d",
3247 frameNumDiff, last_frame_number);
3248 }
3249
3250 for (size_t i = 0; i < loopCount; i++) {
3251 /* handleMetadataWithLock is called even for invalid_metadata for
3252 * pipeline depth calculation */
3253 if (!invalid_metadata) {
3254 /* Infer frame number. Batch metadata contains frame number of the
3255 * last frame */
3256 if (urgent_frame_number_valid) {
3257 if (i < urgentFrameNumDiff) {
3258 urgent_frame_number =
3259 first_urgent_frame_number + i;
3260 LOGD("inferred urgent frame_number: %d",
3261 urgent_frame_number);
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3263 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3264 } else {
3265 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3268 }
3269 }
3270
3271 /* Infer frame number. Batch metadata contains frame number of the
3272 * last frame */
3273 if (frame_number_valid) {
3274 if (i < frameNumDiff) {
3275 frame_number = first_frame_number + i;
3276 LOGD("inferred frame_number: %d", frame_number);
3277 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3278 CAM_INTF_META_FRAME_NUMBER, frame_number);
3279 } else {
3280 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3283 }
3284 }
3285
3286 if (last_frame_capture_time) {
3287 //Infer timestamp
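                // Illustration (hypothetical values): with mHFRVideoFps = 120 and
                // loopCount = 4, frames are spaced NSEC_PER_SEC / 120 (~8.33 ms) apart,
                // so the first frame gets last_frame_capture_time - 3 * 8.33 ms and
                // frame i gets first_frame_capture_time + i * 8.33 ms.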
3288 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003289 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003291 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003292 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3293 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3294 LOGD("batch capture_time: %lld, capture_time: %lld",
3295 last_frame_capture_time, capture_time);
3296 }
3297 }
3298 pthread_mutex_lock(&mMutex);
3299 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003300 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003301 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3302 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003303                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 pthread_mutex_unlock(&mMutex);
3305 }
3306
3307 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003308 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003309 mMetadataChannel->bufDone(metadata_buf);
3310 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003311 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003312 }
3313}
3314
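/*===========================================================================
 * FUNCTION   : notifyError
 *
 * DESCRIPTION: Send a CAMERA3_MSG_ERROR notification for the given frame
 *              number to the framework via orchestrateNotify.
 *
 * PARAMETERS : @frameNumber: frame number the error applies to
 *              @errorCode  : error code reported to the framework
 *
 * RETURN     : None
 *
 *==========================================================================*/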
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003315void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3316 camera3_error_msg_code_t errorCode)
3317{
3318 camera3_notify_msg_t notify_msg;
3319 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3320 notify_msg.type = CAMERA3_MSG_ERROR;
3321 notify_msg.message.error.error_code = errorCode;
3322 notify_msg.message.error.error_stream = NULL;
3323 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003324 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003325
3326 return;
3327}
Thierry Strudel3d639192016-09-09 11:52:26 -07003328/*===========================================================================
3329 * FUNCTION : handleMetadataWithLock
3330 *
3331 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3332 *
3333 * PARAMETERS : @metadata_buf: metadata buffer
3334 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3335 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003336 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3337 * last urgent metadata in a batch. Always true for non-batch mode
3338 * @lastMetadataInBatch: Boolean to indicate whether this is the
3339 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3341 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003342 *
3343 * RETURN :
3344 *
3345 *==========================================================================*/
3346void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003347 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003348 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3349 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3353 //during flush do not send metadata from this thread
3354 LOGD("not sending metadata during flush or when mState is error");
3355 if (free_and_bufdone_meta_buf) {
3356 mMetadataChannel->bufDone(metadata_buf);
3357 free(metadata_buf);
3358 }
3359 return;
3360 }
3361
3362 //not in flush
3363 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3364 int32_t frame_number_valid, urgent_frame_number_valid;
3365 uint32_t frame_number, urgent_frame_number;
3366 int64_t capture_time;
3367 nsecs_t currentSysTime;
3368
3369 int32_t *p_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3372 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3373 int32_t *p_urgent_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_urgent_frame_number =
3376 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3377 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3378 metadata) {
3379 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3380 *p_frame_number_valid, *p_frame_number);
3381 }
3382
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003383 camera_metadata_t *resultMetadata = nullptr;
3384
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3386 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3387 LOGE("Invalid metadata");
3388 if (free_and_bufdone_meta_buf) {
3389 mMetadataChannel->bufDone(metadata_buf);
3390 free(metadata_buf);
3391 }
3392 goto done_metadata;
3393 }
3394 frame_number_valid = *p_frame_number_valid;
3395 frame_number = *p_frame_number;
3396 capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 urgent_frame_number = *p_urgent_frame_number;
3399 currentSysTime = systemTime(CLOCK_MONOTONIC);
3400
3401 // Detect if buffers from any requests are overdue
3402 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003403 int64_t timeout;
3404 {
3405 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3406 // If there is a pending HDR+ request, the following requests may be blocked until the
3407 // HDR+ request is done. So allow a longer timeout.
3408 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3409 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3410 }
3411
3412 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003414 assert(missed.stream->priv);
3415 if (missed.stream->priv) {
3416 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3417 assert(ch->mStreams[0]);
3418 if (ch->mStreams[0]) {
3419 LOGE("Cancel missing frame = %d, buffer = %p,"
3420                            " stream type = %d, stream format = %d",
3421 req.frame_number, missed.buffer,
3422 ch->mStreams[0]->getMyType(), missed.stream->format);
3423 ch->timeoutFrame(req.frame_number);
3424 }
3425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 }
3427 }
3428 }
3429 //Partial result on process_capture_result for timestamp
3430 if (urgent_frame_number_valid) {
3431 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3432 urgent_frame_number, capture_time);
3433
3434        //Received an urgent frame number, handle it
3435 //using partial results
3436 for (pendingRequestIterator i =
3437 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3438 LOGD("Iterator Frame = %d urgent frame = %d",
3439 i->frame_number, urgent_frame_number);
3440
3441 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3442 (i->partial_result_cnt == 0)) {
3443 LOGE("Error: HAL missed urgent metadata for frame number %d",
3444 i->frame_number);
3445 }
3446
3447 if (i->frame_number == urgent_frame_number &&
3448 i->bUrgentReceived == 0) {
3449
3450 camera3_capture_result_t result;
3451 memset(&result, 0, sizeof(camera3_capture_result_t));
3452
3453 i->partial_result_cnt++;
3454 i->bUrgentReceived = 1;
3455 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003456 result.result = translateCbUrgentMetadataToResultMetadata(
3457 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003458 // Populate metadata result
3459 result.frame_number = urgent_frame_number;
3460 result.num_output_buffers = 0;
3461 result.output_buffers = NULL;
3462 result.partial_result = i->partial_result_cnt;
3463
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003464 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003465 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003466 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003467 result.partial_result == PARTIAL_RESULT_COUNT);
3468 }
3469
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003470 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003471 LOGD("urgent frame_number = %u, capture_time = %lld",
3472 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003473 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3474 // Instant AEC settled for this frame.
3475 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3476 mInstantAECSettledFrameNumber = urgent_frame_number;
3477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 free_camera_metadata((camera_metadata_t *)result.result);
3479 break;
3480 }
3481 }
3482 }
3483
3484 if (!frame_number_valid) {
3485 LOGD("Not a valid normal frame number, used as SOF only");
3486 if (free_and_bufdone_meta_buf) {
3487 mMetadataChannel->bufDone(metadata_buf);
3488 free(metadata_buf);
3489 }
3490 goto done_metadata;
3491 }
3492 LOGH("valid frame_number = %u, capture_time = %lld",
3493 frame_number, capture_time);
3494
Emilian Peev7650c122017-01-19 08:24:33 -08003495 if (metadata->is_depth_data_valid) {
3496 handleDepthDataLocked(metadata->depth_data, frame_number);
3497 }
3498
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003499    // Check whether any stream buffer corresponding to this frame is dropped or not
3500 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3501    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
3502 for (auto & pendingRequest : mPendingRequestsList) {
3503 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3504 mInstantAECSettledFrameNumber)) {
3505 camera3_notify_msg_t notify_msg = {};
3506 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003507 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003508 QCamera3ProcessingChannel *channel =
3509 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003510 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003511 if (p_cam_frame_drop) {
3512 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003513 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003514 // Got the stream ID for drop frame.
3515 dropFrame = true;
3516 break;
3517 }
3518 }
3519 } else {
3520 // This is instant AEC case.
3521                    // For instant AEC, drop the stream until AEC is settled.
3522 dropFrame = true;
3523 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003524
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003525 if (dropFrame) {
3526 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3527 if (p_cam_frame_drop) {
3528 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003529 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003530 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003531 } else {
3532 // For instant AEC, inform frame drop and frame number
3533 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3534 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003535 pendingRequest.frame_number, streamID,
3536 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003537 }
3538 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003539 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003540 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003541 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003542 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003543 if (p_cam_frame_drop) {
3544 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003545 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003546 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003547 } else {
3548 // For instant AEC, inform frame drop and frame number
3549 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3550 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003551 pendingRequest.frame_number, streamID,
3552 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003553 }
3554 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003555 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003556 PendingFrameDrop.stream_ID = streamID;
3557 // Add the Frame drop info to mPendingFrameDropList
3558 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003560 }
3561 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003563
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003564 for (auto & pendingRequest : mPendingRequestsList) {
3565 // Find the pending request with the frame number.
3566 if (pendingRequest.frame_number == frame_number) {
3567 // Update the sensor timestamp.
3568 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003569
Thierry Strudel3d639192016-09-09 11:52:26 -07003570
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003571 /* Set the timestamp in display metadata so that clients aware of
3572           private_handle such as VT can use these unmodified timestamps.
3573 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003574 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003575
Thierry Strudel3d639192016-09-09 11:52:26 -07003576 // Find channel requiring metadata, meaning internal offline postprocess
3577 // is needed.
3578 //TODO: for now, we don't support two streams requiring metadata at the same time.
3579            // (because we are not making copies, and the metadata buffer is not reference counted.)
3580 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3582 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003583 if (iter->need_metadata) {
3584 internalPproc = true;
3585 QCamera3ProcessingChannel *channel =
3586 (QCamera3ProcessingChannel *)iter->stream->priv;
3587 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003588 if(p_is_metabuf_queued != NULL) {
3589 *p_is_metabuf_queued = true;
3590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003591 break;
3592 }
3593 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003594 for (auto itr = pendingRequest.internalRequestList.begin();
3595 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003596 if (itr->need_metadata) {
3597 internalPproc = true;
3598 QCamera3ProcessingChannel *channel =
3599 (QCamera3ProcessingChannel *)itr->stream->priv;
3600 channel->queueReprocMetadata(metadata_buf);
3601 break;
3602 }
3603 }
3604
Thierry Strudel54dc9782017-02-15 12:12:10 -08003605 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606 resultMetadata = translateFromHalMetadata(metadata,
3607 pendingRequest.timestamp, pendingRequest.request_id,
3608 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3609 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003610 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003611 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003612 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003613 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003614 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003615 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003616
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003618
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003619 if (pendingRequest.blob_request) {
3620 //Dump tuning metadata if enabled and available
3621 char prop[PROPERTY_VALUE_MAX];
3622 memset(prop, 0, sizeof(prop));
3623 property_get("persist.camera.dumpmetadata", prop, "0");
3624 int32_t enabled = atoi(prop);
3625 if (enabled && metadata->is_tuning_params_valid) {
3626 dumpMetadataToFile(metadata->tuning_params,
3627 mMetaFrameCount,
3628 enabled,
3629 "Snapshot",
3630 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003631 }
3632 }
3633
3634 if (!internalPproc) {
3635 LOGD("couldn't find need_metadata for this metadata");
3636 // Return metadata buffer
3637 if (free_and_bufdone_meta_buf) {
3638 mMetadataChannel->bufDone(metadata_buf);
3639 free(metadata_buf);
3640 }
3641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003644 }
3645 }
3646
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003647 // Try to send out shutter callbacks and capture results.
3648 handlePendingResultsWithLock(frame_number, resultMetadata);
3649 return;
3650
Thierry Strudel3d639192016-09-09 11:52:26 -07003651done_metadata:
3652 for (pendingRequestIterator i = mPendingRequestsList.begin();
3653 i != mPendingRequestsList.end() ;i++) {
3654 i->pipeline_depth++;
3655 }
3656 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3657 unblockRequestIfNecessary();
3658}
3659
3660/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003661 * FUNCTION   : handleDepthDataLocked
3662 *
3663 * DESCRIPTION: Handles incoming depth data
3664 *
3665 * PARAMETERS : @depthData : Depth data
3666 * @frameNumber: Frame number of the incoming depth data
3667 *
3668 * RETURN :
3669 *
3670 *==========================================================================*/
3671void QCamera3HardwareInterface::handleDepthDataLocked(
3672 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3673 uint32_t currentFrameNumber;
3674 buffer_handle_t *depthBuffer;
3675
3676 if (nullptr == mDepthChannel) {
3677 LOGE("Depth channel not present!");
3678 return;
3679 }
3680
3681 camera3_stream_buffer_t resultBuffer =
3682 {.acquire_fence = -1,
3683 .release_fence = -1,
3684 .status = CAMERA3_BUFFER_STATUS_OK,
3685 .buffer = nullptr,
3686 .stream = mDepthChannel->getStream()};
3687 camera3_capture_result_t result =
3688 {.result = nullptr,
3689 .num_output_buffers = 1,
3690 .output_buffers = &resultBuffer,
3691 .partial_result = 0,
3692 .frame_number = 0};
3693
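    // Drain queued depth buffers in frame-number order up to and including frameNumber.
    // The buffer matching frameNumber is populated with depthData; older buffers that
    // never received depth data are returned to the framework with an error status.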
3694 do {
3695 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3696 if (nullptr == depthBuffer) {
3697 break;
3698 }
3699
3700 result.frame_number = currentFrameNumber;
3701 resultBuffer.buffer = depthBuffer;
3702 if (currentFrameNumber == frameNumber) {
3703 int32_t rc = mDepthChannel->populateDepthData(depthData,
3704 frameNumber);
3705 if (NO_ERROR != rc) {
3706 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3707 } else {
3708 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3709 }
3710 } else if (currentFrameNumber > frameNumber) {
3711 break;
3712 } else {
3713 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3714 {{currentFrameNumber, mDepthChannel->getStream(),
3715 CAMERA3_MSG_ERROR_BUFFER}}};
3716 orchestrateNotify(&notify_msg);
3717
3718                LOGE("Depth buffer for frame number: %d is missing, "
3719 "returning back!", currentFrameNumber);
3720 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3721 }
3722 mDepthChannel->unmapBuffer(currentFrameNumber);
3723
3724 orchestrateResult(&result);
3725 } while (currentFrameNumber < frameNumber);
3726}
3727
3728/*===========================================================================
3729 * FUNCTION : notifyErrorFoPendingDepthData
3730 *
3731 * DESCRIPTION: Returns error for any pending depth buffers
3732 *
3733 * PARAMETERS : depthCh - depth channel that needs to get flushed
3734 *
3735 * RETURN :
3736 *
3737 *==========================================================================*/
3738void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3739 QCamera3DepthChannel *depthCh) {
3740 uint32_t currentFrameNumber;
3741 buffer_handle_t *depthBuffer;
3742
3743 if (nullptr == depthCh) {
3744 return;
3745 }
3746
3747 camera3_notify_msg_t notify_msg =
3748 {.type = CAMERA3_MSG_ERROR,
3749 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3750 camera3_stream_buffer_t resultBuffer =
3751 {.acquire_fence = -1,
3752 .release_fence = -1,
3753 .buffer = nullptr,
3754 .stream = depthCh->getStream(),
3755 .status = CAMERA3_BUFFER_STATUS_ERROR};
3756 camera3_capture_result_t result =
3757 {.result = nullptr,
3758 .frame_number = 0,
3759 .num_output_buffers = 1,
3760 .partial_result = 0,
3761 .output_buffers = &resultBuffer};
3762
3763 while (nullptr !=
3764 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3765 depthCh->unmapBuffer(currentFrameNumber);
3766
3767 notify_msg.message.error.frame_number = currentFrameNumber;
3768 orchestrateNotify(&notify_msg);
3769
3770 resultBuffer.buffer = depthBuffer;
3771 result.frame_number = currentFrameNumber;
3772 orchestrateResult(&result);
3773 };
3774}
3775
3776/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003777 * FUNCTION : hdrPlusPerfLock
3778 *
3779 * DESCRIPTION: perf lock for HDR+ using custom intent
3780 *
3781 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3782 *
3783 * RETURN : None
3784 *
3785 *==========================================================================*/
3786void QCamera3HardwareInterface::hdrPlusPerfLock(
3787 mm_camera_super_buf_t *metadata_buf)
3788{
3789 if (NULL == metadata_buf) {
3790 LOGE("metadata_buf is NULL");
3791 return;
3792 }
3793 metadata_buffer_t *metadata =
3794 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3795 int32_t *p_frame_number_valid =
3796 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3797 uint32_t *p_frame_number =
3798 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3799
3800 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3801 LOGE("%s: Invalid metadata", __func__);
3802 return;
3803 }
3804
3805 //acquire perf lock for 5 sec after the last HDR frame is captured
3806 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3807 if ((p_frame_number != NULL) &&
3808 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003809 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 }
3811 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003812}
3813
3814/*===========================================================================
3815 * FUNCTION : handleInputBufferWithLock
3816 *
3817 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3818 *
3819 * PARAMETERS : @frame_number: frame number of the input buffer
3820 *
3821 * RETURN :
3822 *
3823 *==========================================================================*/
3824void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3825{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003826 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003827 pendingRequestIterator i = mPendingRequestsList.begin();
3828 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3829 i++;
3830 }
3831 if (i != mPendingRequestsList.end() && i->input_buffer) {
3832 //found the right request
3833 if (!i->shutter_notified) {
3834 CameraMetadata settings;
3835 camera3_notify_msg_t notify_msg;
3836 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3837 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3838 if(i->settings) {
3839 settings = i->settings;
3840 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3841 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3842 } else {
3843 LOGE("No timestamp in input settings! Using current one.");
3844 }
3845 } else {
3846 LOGE("Input settings missing!");
3847 }
3848
3849 notify_msg.type = CAMERA3_MSG_SHUTTER;
3850 notify_msg.message.shutter.frame_number = frame_number;
3851 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003852 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003853 i->shutter_notified = true;
3854 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3855 i->frame_number, notify_msg.message.shutter.timestamp);
3856 }
3857
3858 if (i->input_buffer->release_fence != -1) {
3859 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3860 close(i->input_buffer->release_fence);
3861 if (rc != OK) {
3862 LOGE("input buffer sync wait failed %d", rc);
3863 }
3864 }
3865
3866 camera3_capture_result result;
3867 memset(&result, 0, sizeof(camera3_capture_result));
3868 result.frame_number = frame_number;
3869 result.result = i->settings;
3870 result.input_buffer = i->input_buffer;
3871 result.partial_result = PARTIAL_RESULT_COUNT;
3872
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003873 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003874 LOGD("Input request metadata and input buffer frame_number = %u",
3875 i->frame_number);
3876 i = erasePendingRequest(i);
3877 } else {
3878 LOGE("Could not find input request for frame number %d", frame_number);
3879 }
3880}
3881
3882/*===========================================================================
3883 * FUNCTION : handleBufferWithLock
3884 *
3885 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3886 *
3887 * PARAMETERS : @buffer: image buffer for the callback
3888 * @frame_number: frame number of the image buffer
3889 *
3890 * RETURN :
3891 *
3892 *==========================================================================*/
3893void QCamera3HardwareInterface::handleBufferWithLock(
3894 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3895{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003896 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003897
3898 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3899 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3900 }
3901
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 /* Nothing to be done during error state */
3903 if ((ERROR == mState) || (DEINIT == mState)) {
3904 return;
3905 }
3906 if (mFlushPerf) {
3907 handleBuffersDuringFlushLock(buffer);
3908 return;
3909 }
3910 //not in flush
3911 // If the frame number doesn't exist in the pending request list,
3912 // directly send the buffer to the frameworks, and update pending buffers map
3913 // Otherwise, book-keep the buffer.
3914 pendingRequestIterator i = mPendingRequestsList.begin();
3915 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3916 i++;
3917 }
3918 if (i == mPendingRequestsList.end()) {
3919        // Verify that all pending requests' frame_numbers are greater
3920 for (pendingRequestIterator j = mPendingRequestsList.begin();
3921 j != mPendingRequestsList.end(); j++) {
3922 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3923 LOGW("Error: pending live frame number %d is smaller than %d",
3924 j->frame_number, frame_number);
3925 }
3926 }
3927 camera3_capture_result_t result;
3928 memset(&result, 0, sizeof(camera3_capture_result_t));
3929 result.result = NULL;
3930 result.frame_number = frame_number;
3931 result.num_output_buffers = 1;
3932 result.partial_result = 0;
3933 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3934 m != mPendingFrameDropList.end(); m++) {
3935 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3936 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3937 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3938 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3939 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3940 frame_number, streamID);
3941 m = mPendingFrameDropList.erase(m);
3942 break;
3943 }
3944 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003945 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003946 result.output_buffers = buffer;
3947 LOGH("result frame_number = %d, buffer = %p",
3948 frame_number, buffer->buffer);
3949
3950 mPendingBuffersMap.removeBuf(buffer->buffer);
3951
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003952 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003953 } else {
3954 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003955 if (i->input_buffer->release_fence != -1) {
3956 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3957 close(i->input_buffer->release_fence);
3958 if (rc != OK) {
3959 LOGE("input buffer sync wait failed %d", rc);
3960 }
3961 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003962 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003963
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003964 // Put buffer into the pending request
3965 for (auto &requestedBuffer : i->buffers) {
3966 if (requestedBuffer.stream == buffer->stream) {
3967 if (requestedBuffer.buffer != nullptr) {
3968 LOGE("Error: buffer is already set");
3969 } else {
3970 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3971 sizeof(camera3_stream_buffer_t));
3972 *(requestedBuffer.buffer) = *buffer;
3973 LOGH("cache buffer %p at result frame_number %u",
3974 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003975 }
3976 }
3977 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003978
3979 if (i->input_buffer) {
3980 // For a reprocessing request, try to send out shutter callback and result metadata.
3981 handlePendingResultsWithLock(frame_number, nullptr);
3982 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003983 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003984
3985 if (mPreviewStarted == false) {
3986 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3987 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3988 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3989 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3990 mPreviewStarted = true;
3991
3992 // Set power hint for preview
3993 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3994 }
3995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003996}
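// Note on the buffer path above (a summary of existing behavior, not new logic):
// a returned buffer whose frame number has no matching pending request is sent to
// the framework right away as a single-buffer capture_result, after applying any
// pending frame-drop and buffer-error status; a buffer that does match a pending
// request is copied into that request's buffer list and delivered later, together
// with the result metadata, by handlePendingResultsWithLock().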
3997
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003998void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3999 const camera_metadata_t *resultMetadata)
4000{
4001 // Find the pending request for this result metadata.
4002 auto requestIter = mPendingRequestsList.begin();
4003 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4004 requestIter++;
4005 }
4006
4007 if (requestIter == mPendingRequestsList.end()) {
4008 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4009 return;
4010 }
4011
4012 // Update the result metadata
4013 requestIter->resultMetadata = resultMetadata;
4014
4015 // Check what type of request this is.
4016 bool liveRequest = false;
4017 if (requestIter->hdrplus) {
4018 // HDR+ request doesn't have partial results.
4019 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4020 } else if (requestIter->input_buffer != nullptr) {
4021 // Reprocessing request result is the same as settings.
4022 requestIter->resultMetadata = requestIter->settings;
4023 // Reprocessing request doesn't have partial results.
4024 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4025 } else {
4026 liveRequest = true;
4027 requestIter->partial_result_cnt++;
4028 mPendingLiveRequest--;
4029
4030 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004031 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4032 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004033 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4034 }
4035 }
4036
4037 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4038 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4039 bool readyToSend = true;
4040
4041 // Iterate through the pending requests to send out shutter callbacks and results that are
4042 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4043 // live requests that don't have result metadata yet.
4044 auto iter = mPendingRequestsList.begin();
4045 while (iter != mPendingRequestsList.end()) {
4046 // Check if current pending request is ready. If it's not ready, the following pending
4047 // requests are also not ready.
4048 if (readyToSend && iter->resultMetadata == nullptr) {
4049 readyToSend = false;
4050 }
4051
4052 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4053
4054 std::vector<camera3_stream_buffer_t> outputBuffers;
4055
4056 camera3_capture_result_t result = {};
4057 result.frame_number = iter->frame_number;
4058 result.result = iter->resultMetadata;
4059 result.partial_result = iter->partial_result_cnt;
4060
4061 // If this pending buffer has result metadata, we may be able to send out shutter callback
4062 // and result metadata.
4063 if (iter->resultMetadata != nullptr) {
4064 if (!readyToSend) {
4065 // If any of the previous pending request is not ready, this pending request is
4066 // also not ready to send in order to keep shutter callbacks and result metadata
4067 // in order.
4068 iter++;
4069 continue;
4070 }
4071
4072 // Invoke shutter callback if not yet.
4073 if (!iter->shutter_notified) {
4074 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4075
4076 // Find the timestamp in HDR+ result metadata
4077 camera_metadata_ro_entry_t entry;
4078 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4079 ANDROID_SENSOR_TIMESTAMP, &entry);
4080 if (res != OK) {
4081 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4082 __FUNCTION__, iter->frame_number, strerror(-res), res);
4083 } else {
4084 timestamp = entry.data.i64[0];
4085 }
4086
4087 camera3_notify_msg_t notify_msg = {};
4088 notify_msg.type = CAMERA3_MSG_SHUTTER;
4089 notify_msg.message.shutter.frame_number = iter->frame_number;
4090 notify_msg.message.shutter.timestamp = timestamp;
4091 orchestrateNotify(&notify_msg);
4092 iter->shutter_notified = true;
4093 }
4094
4095 result.input_buffer = iter->input_buffer;
4096
4097 // Prepare output buffer array
4098 for (auto bufferInfoIter = iter->buffers.begin();
4099 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4100 if (bufferInfoIter->buffer != nullptr) {
4101
4102 QCamera3Channel *channel =
4103 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4104 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4105
4106 // Check if this buffer is a dropped frame.
4107 auto frameDropIter = mPendingFrameDropList.begin();
4108 while (frameDropIter != mPendingFrameDropList.end()) {
4109 if((frameDropIter->stream_ID == streamID) &&
4110 (frameDropIter->frame_number == frameNumber)) {
4111 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4112 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4113 streamID);
4114 mPendingFrameDropList.erase(frameDropIter);
4115 break;
4116 } else {
4117 frameDropIter++;
4118 }
4119 }
4120
4121 // Check buffer error status
4122 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4123 bufferInfoIter->buffer->buffer);
4124 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4125
4126 outputBuffers.push_back(*(bufferInfoIter->buffer));
4127 free(bufferInfoIter->buffer);
4128 bufferInfoIter->buffer = NULL;
4129 }
4130 }
4131
4132 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4133 result.num_output_buffers = outputBuffers.size();
4134 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4135 // If the result metadata belongs to a live request, notify errors for previous pending
4136 // live requests.
4137 mPendingLiveRequest--;
4138
4139 CameraMetadata dummyMetadata;
4140 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4141 result.result = dummyMetadata.release();
4142
4143 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
4144 } else {
4145 iter++;
4146 continue;
4147 }
4148
4149 orchestrateResult(&result);
4150
4151 // For reprocessing, result metadata is the same as settings so do not free it here to
4152 // avoid double free.
4153 if (result.result != iter->settings) {
4154 free_camera_metadata((camera_metadata_t *)result.result);
4155 }
4156 iter->resultMetadata = nullptr;
4157 iter = erasePendingRequest(iter);
4158 }
4159
4160 if (liveRequest) {
4161 for (auto &iter : mPendingRequestsList) {
4162 // Increment pipeline depth for the following pending requests.
4163 if (iter.frame_number > frameNumber) {
4164 iter.pipeline_depth++;
4165 }
4166 }
4167 }
4168
4169 unblockRequestIfNecessary();
4170}
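// Worked example of the ordering rule above (frame numbers are illustrative):
// with requests 10, 11 and 12 pending, assume result metadata for 11 arrives
// first. If 11 is an HDR+ or reprocess result, both 10 and 11 are held because
// readyToSend turns false at the first entry without metadata. If 11 is a live
// result, request 10 (live, still missing metadata) is instead completed with a
// dummy result plus a CAMERA3_MSG_ERROR_RESULT notification so that 11 can be
// emitted in order; request 12 stays pending in either case.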
4171
Thierry Strudel3d639192016-09-09 11:52:26 -07004172/*===========================================================================
4173 * FUNCTION : unblockRequestIfNecessary
4174 *
4175 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4176 * that mMutex is held when this function is called.
4177 *
4178 * PARAMETERS :
4179 *
4180 * RETURN :
4181 *
4182 *==========================================================================*/
4183void QCamera3HardwareInterface::unblockRequestIfNecessary()
4184{
4185 // Unblock process_capture_request
4186 pthread_cond_signal(&mRequestCond);
4187}
4188
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004189/*===========================================================================
4190 * FUNCTION : isHdrSnapshotRequest
4191 *
4192 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4193 *
4194 * PARAMETERS : camera3 request structure
4195 *
4196 * RETURN : boolean decision variable
4197 *
4198 *==========================================================================*/
4199bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4200{
4201 if (request == NULL) {
4202 LOGE("Invalid request handle");
4203 assert(0);
4204 return false;
4205 }
4206
4207 if (!mForceHdrSnapshot) {
4208 CameraMetadata frame_settings;
4209 frame_settings = request->settings;
4210
4211 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4212 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4213 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4214 return false;
4215 }
4216 } else {
4217 return false;
4218 }
4219
4220 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4221 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4222 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4223 return false;
4224 }
4225 } else {
4226 return false;
4227 }
4228 }
4229
4230 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4231 if (request->output_buffers[i].stream->format
4232 == HAL_PIXEL_FORMAT_BLOB) {
4233 return true;
4234 }
4235 }
4236
4237 return false;
4238}
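// Sketch of a request the check above treats as an HDR snapshot (assuming
// mForceHdrSnapshot is not set): the settings carry
// ANDROID_CONTROL_MODE == ANDROID_CONTROL_MODE_USE_SCENE_MODE and
// ANDROID_CONTROL_SCENE_MODE == ANDROID_CONTROL_SCENE_MODE_HDR, and at least one
// output buffer targets a HAL_PIXEL_FORMAT_BLOB stream. When mForceHdrSnapshot is
// set, only the BLOB output condition is required.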
4239/*===========================================================================
4240 * FUNCTION : orchestrateRequest
4241 *
4242 * DESCRIPTION: Orchestrates a capture request from camera service
4243 *
4244 * PARAMETERS :
4245 * @request : request from framework to process
4246 *
4247 * RETURN : Error status codes
4248 *
4249 *==========================================================================*/
4250int32_t QCamera3HardwareInterface::orchestrateRequest(
4251 camera3_capture_request_t *request)
4252{
4253
4254 uint32_t originalFrameNumber = request->frame_number;
4255 uint32_t originalOutputCount = request->num_output_buffers;
4256 const camera_metadata_t *original_settings = request->settings;
4257 List<InternalRequest> internallyRequestedStreams;
4258 List<InternalRequest> emptyInternalList;
4259
4260 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4261 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4262 uint32_t internalFrameNumber;
4263 CameraMetadata modified_meta;
4264
4265
4266 /* Add Blob channel to list of internally requested streams */
4267 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4268 if (request->output_buffers[i].stream->format
4269 == HAL_PIXEL_FORMAT_BLOB) {
4270 InternalRequest streamRequested;
4271 streamRequested.meteringOnly = 1;
4272 streamRequested.need_metadata = 0;
4273 streamRequested.stream = request->output_buffers[i].stream;
4274 internallyRequestedStreams.push_back(streamRequested);
4275 }
4276 }
4277 request->num_output_buffers = 0;
4278 auto itr = internallyRequestedStreams.begin();
4279
4280 /* Modify setting to set compensation */
4281 modified_meta = request->settings;
4282 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4283 uint8_t aeLock = 1;
4284 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4285 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4286 camera_metadata_t *modified_settings = modified_meta.release();
4287 request->settings = modified_settings;
4288
4289 /* Capture Settling & -2x frame */
4290 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4291 request->frame_number = internalFrameNumber;
4292 processCaptureRequest(request, internallyRequestedStreams);
4293
4294 request->num_output_buffers = originalOutputCount;
4295 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4296 request->frame_number = internalFrameNumber;
4297 processCaptureRequest(request, emptyInternalList);
4298 request->num_output_buffers = 0;
4299
4300 modified_meta = modified_settings;
4301 expCompensation = 0;
4302 aeLock = 1;
4303 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4304 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4305 modified_settings = modified_meta.release();
4306 request->settings = modified_settings;
4307
4308 /* Capture Settling & 0X frame */
4309
4310 itr = internallyRequestedStreams.begin();
4311 if (itr == internallyRequestedStreams.end()) {
4312 LOGE("Error Internally Requested Stream list is empty");
4313 assert(0);
4314 } else {
4315 itr->need_metadata = 0;
4316 itr->meteringOnly = 1;
4317 }
4318
4319 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4320 request->frame_number = internalFrameNumber;
4321 processCaptureRequest(request, internallyRequestedStreams);
4322
4323 itr = internallyRequestedStreams.begin();
4324 if (itr == internallyRequestedStreams.end()) {
4325 ALOGE("Error Internally Requested Stream list is empty");
4326 assert(0);
4327 } else {
4328 itr->need_metadata = 1;
4329 itr->meteringOnly = 0;
4330 }
4331
4332 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4333 request->frame_number = internalFrameNumber;
4334 processCaptureRequest(request, internallyRequestedStreams);
4335
4336 /* Capture 2X frame*/
4337 modified_meta = modified_settings;
4338 expCompensation = GB_HDR_2X_STEP_EV;
4339 aeLock = 1;
4340 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4341 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4342 modified_settings = modified_meta.release();
4343 request->settings = modified_settings;
4344
4345 itr = internallyRequestedStreams.begin();
4346 if (itr == internallyRequestedStreams.end()) {
4347 ALOGE("Error Internally Requested Stream list is empty");
4348 assert(0);
4349 } else {
4350 itr->need_metadata = 0;
4351 itr->meteringOnly = 1;
4352 }
4353 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4354 request->frame_number = internalFrameNumber;
4355 processCaptureRequest(request, internallyRequestedStreams);
4356
4357 itr = internallyRequestedStreams.begin();
4358 if (itr == internallyRequestedStreams.end()) {
4359 ALOGE("Error Internally Requested Stream list is empty");
4360 assert(0);
4361 } else {
4362 itr->need_metadata = 1;
4363 itr->meteringOnly = 0;
4364 }
4365
4366 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4367 request->frame_number = internalFrameNumber;
4368 processCaptureRequest(request, internallyRequestedStreams);
4369
4370
4371 /* Capture 2X on original streaming config*/
4372 internallyRequestedStreams.clear();
4373
4374 /* Restore original settings pointer */
4375 request->settings = original_settings;
4376 } else {
4377 uint32_t internalFrameNumber;
4378 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4379 request->frame_number = internalFrameNumber;
4380 return processCaptureRequest(request, internallyRequestedStreams);
4381 }
4382
4383 return NO_ERROR;
4384}
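// Summary of the HDR snapshot fan-out above: one framework request becomes a fixed
// sequence of requests tracked by _orchestrationDb. With AE locked at
// GB_HDR_HALF_STEP_EV it issues a metering-only capture on the BLOB stream and then
// the original streams (only this frame is mapped back to the framework frame
// number); it then issues a metering-only plus a metadata-producing capture at EV
// compensation 0, and the same pair again at GB_HDR_2X_STEP_EV, before restoring
// the caller's settings pointer. All other frame numbers are internal and their
// results are dropped in orchestrateResult().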
4385
4386/*===========================================================================
4387 * FUNCTION : orchestrateResult
4388 *
4389 * DESCRIPTION: Orchestrates a capture result to camera service
4390 *
4391 * PARAMETERS :
4392 * @result : capture result to be sent to the framework
4393 *
4394 * RETURN :
4395 *
4396 *==========================================================================*/
4397void QCamera3HardwareInterface::orchestrateResult(
4398 camera3_capture_result_t *result)
4399{
4400 uint32_t frameworkFrameNumber;
4401 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4402 frameworkFrameNumber);
4403 if (rc != NO_ERROR) {
4404 LOGE("Cannot find translated frameworkFrameNumber");
4405 assert(0);
4406 } else {
4407 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004408 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004409 } else {
4410 result->frame_number = frameworkFrameNumber;
4411 mCallbackOps->process_capture_result(mCallbackOps, result);
4412 }
4413 }
4414}
4415
4416/*===========================================================================
4417 * FUNCTION : orchestrateNotify
4418 *
4419 * DESCRIPTION: Orchestrates a notify to camera service
4420 *
4421 * PARAMETERS :
4422 * @notify_msg : notify message to be sent to the framework
4423 *
4424 * RETURN :
4425 *
4426 *==========================================================================*/
4427void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4428{
4429 uint32_t frameworkFrameNumber;
4430 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004431 int32_t rc = NO_ERROR;
4432
4433 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004434 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004435
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004436 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004437 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4438 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4439 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004440 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004441 LOGE("Cannot find translated frameworkFrameNumber");
4442 assert(0);
4443 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 }
4445 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004446
4447 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4448 LOGD("Internal Request drop the notifyCb");
4449 } else {
4450 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4451 mCallbackOps->notify(mCallbackOps, notify_msg);
4452 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004453}
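// Special case in the translation above: when the internal frame number cannot be
// resolved but the message carries CAMERA3_MSG_ERROR_DEVICE, the notify is still
// forwarded with frame number 0 so the framework learns about the fatal device
// error; any other unresolved frame number is treated as a bug.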
4454
4455/*===========================================================================
4456 * FUNCTION : FrameNumberRegistry
4457 *
4458 * DESCRIPTION: Constructor
4459 *
4460 * PARAMETERS :
4461 *
4462 * RETURN :
4463 *
4464 *==========================================================================*/
4465FrameNumberRegistry::FrameNumberRegistry()
4466{
4467 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4468}
4469
4470/*===========================================================================
4471 * FUNCTION : ~FrameNumberRegistry
4472 *
4473 * DESCRIPTION: Destructor
4474 *
4475 * PARAMETERS :
4476 *
4477 * RETURN :
4478 *
4479 *==========================================================================*/
4480FrameNumberRegistry::~FrameNumberRegistry()
4481{
4482}
4483
4484/*===========================================================================
4485 * FUNCTION : PurgeOldEntriesLocked
4486 *
4487 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4488 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4489 * PARAMETERS :
4490 *
4491 * RETURN : NONE
4492 *
4493 *==========================================================================*/
4494void FrameNumberRegistry::purgeOldEntriesLocked()
4495{
4496 while (_register.begin() != _register.end()) {
4497 auto itr = _register.begin();
4498 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4499 _register.erase(itr);
4500 } else {
4501 return;
4502 }
4503 }
4504}
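// Illustrative purge (values are hypothetical): if FRAME_REGISTER_LRU_SIZE were 100
// and _nextFreeInternalNumber were 750, every entry keyed by an internal number
// below 650 would be erased. Iteration starts at begin() and stops at the first
// entry inside the window, which relies on _register being ordered by internal
// frame number.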
4505
4506/*===========================================================================
4507 * FUNCTION : allocStoreInternalFrameNumber
4508 *
4509 * DESCRIPTION: Method to record a framework request and associate a new
4510 * internal frame number with it
4511 *
4512 * PARAMETERS :
4513 * @fFrameNumber: Identifier given by framework
4514 * @internalFN : Output parameter which will have the newly generated internal
4515 * entry
4516 *
4517 * RETURN : Error code
4518 *
4519 *==========================================================================*/
4520int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4521 uint32_t &internalFrameNumber)
4522{
4523 Mutex::Autolock lock(mRegistryLock);
4524 internalFrameNumber = _nextFreeInternalNumber++;
4525 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4526 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4527 purgeOldEntriesLocked();
4528 return NO_ERROR;
4529}
4530
4531/*===========================================================================
4532 * FUNCTION : generateStoreInternalFrameNumber
4533 *
4534 * DESCRIPTION: Method to associate a new internal request number independent
4535 * of any association with framework requests
4536 *
4537 * PARAMETERS :
4538 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4539 *
4540 *
4541 * RETURN : Error code
4542 *
4543 *==========================================================================*/
4544int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4545{
4546 Mutex::Autolock lock(mRegistryLock);
4547 internalFrameNumber = _nextFreeInternalNumber++;
4548 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4549 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4550 purgeOldEntriesLocked();
4551 return NO_ERROR;
4552}
4553
4554/*===========================================================================
4555 * FUNCTION : getFrameworkFrameNumber
4556 *
4557 * DESCRIPTION: Method to query the framework framenumber given an internal #
4558 *
4559 * PARAMETERS :
4560 * @internalFrame#: Internal reference
4561 * @frameworkframenumber: Output parameter holding framework frame entry
4562 *
4563 * RETURN : Error code
4564 *
4565 *==========================================================================*/
4566int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4567 uint32_t &frameworkFrameNumber)
4568{
4569 Mutex::Autolock lock(mRegistryLock);
4570 auto itr = _register.find(internalFrameNumber);
4571 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004572 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004573 return -ENOENT;
4574 }
4575
4576 frameworkFrameNumber = itr->second;
4577 purgeOldEntriesLocked();
4578 return NO_ERROR;
4579}
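// Usage sketch of the registry (frame numbers are hypothetical, not taken from the
// HAL flow):
//     uint32_t internal, framework;
//     _orchestrationDb.allocStoreInternalFrameNumber(100, internal);   // framework 100 -> internal
//     _orchestrationDb.getFrameworkFrameNumber(internal, framework);   // framework == 100
//     _orchestrationDb.generateStoreInternalFrameNumber(internal);     // maps to EMPTY_FRAMEWORK_FRAME_NUMBER
// Results and notifications whose internal number resolves to
// EMPTY_FRAMEWORK_FRAME_NUMBER are dropped by orchestrateResult() and
// orchestrateNotify().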
Thierry Strudel3d639192016-09-09 11:52:26 -07004580
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004581status_t QCamera3HardwareInterface::fillPbStreamConfig(
4582 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4583 QCamera3Channel *channel, uint32_t streamIndex) {
4584 if (config == nullptr) {
4585 LOGE("%s: config is null", __FUNCTION__);
4586 return BAD_VALUE;
4587 }
4588
4589 if (channel == nullptr) {
4590 LOGE("%s: channel is null", __FUNCTION__);
4591 return BAD_VALUE;
4592 }
4593
4594 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4595 if (stream == nullptr) {
4596 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4597 return NAME_NOT_FOUND;
4598 }
4599
4600 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4601 if (streamInfo == nullptr) {
4602 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4603 return NAME_NOT_FOUND;
4604 }
4605
4606 config->id = pbStreamId;
4607 config->image.width = streamInfo->dim.width;
4608 config->image.height = streamInfo->dim.height;
4609 config->image.padding = 0;
4610 config->image.format = pbStreamFormat;
4611
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004612 uint32_t totalPlaneSize = 0;
4613
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004614 // Fill plane information.
4615 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4616 pbcamera::PlaneConfiguration plane;
4617 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4618 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4619 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004620
4621 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004622 }
4623
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004624 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004625 return OK;
4626}
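// The padding computed above is whatever the frame length reports beyond the sum of
// the per-plane (stride * scanline) sizes. Hypothetical example: planes of
// 1920x1088 and 1920x544 bytes total 3133440 bytes, so a frame_len of 3137536
// yields a padding of 4096.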
4627
Thierry Strudel3d639192016-09-09 11:52:26 -07004628/*===========================================================================
4629 * FUNCTION : processCaptureRequest
4630 *
4631 * DESCRIPTION: process a capture request from camera service
4632 *
4633 * PARAMETERS :
4634 * @request : request from framework to process
4635 * @request : request from framework to process
 * @internallyRequestedStreams : streams requested internally by the HAL
 *                               (e.g. for HDR snapshot bracketing)
4636 * RETURN :
4637 *
4638 *==========================================================================*/
4639int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004640 camera3_capture_request_t *request,
4641 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004642{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004644 int rc = NO_ERROR;
4645 int32_t request_id;
4646 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004647 bool isVidBufRequested = false;
4648 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004649 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004650
4651 pthread_mutex_lock(&mMutex);
4652
4653 // Validate current state
4654 switch (mState) {
4655 case CONFIGURED:
4656 case STARTED:
4657 /* valid state */
4658 break;
4659
4660 case ERROR:
4661 pthread_mutex_unlock(&mMutex);
4662 handleCameraDeviceError();
4663 return -ENODEV;
4664
4665 default:
4666 LOGE("Invalid state %d", mState);
4667 pthread_mutex_unlock(&mMutex);
4668 return -ENODEV;
4669 }
4670
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004671 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004672 if (rc != NO_ERROR) {
4673 LOGE("incoming request is not valid");
4674 pthread_mutex_unlock(&mMutex);
4675 return rc;
4676 }
4677
4678 meta = request->settings;
4679
4680 // For first capture request, send capture intent, and
4681 // stream on all streams
4682 if (mState == CONFIGURED) {
4683 // send an unconfigure to the backend so that the isp
4684 // resources are deallocated
4685 if (!mFirstConfiguration) {
4686 cam_stream_size_info_t stream_config_info;
4687 int32_t hal_version = CAM_HAL_V3;
4688 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4689 stream_config_info.buffer_info.min_buffers =
4690 MIN_INFLIGHT_REQUESTS;
4691 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004692 m_bIs4KVideo ? 0 :
4693 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004694 clear_metadata_buffer(mParameters);
4695 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4696 CAM_INTF_PARM_HAL_VERSION, hal_version);
4697 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4698 CAM_INTF_META_STREAM_INFO, stream_config_info);
4699 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4700 mParameters);
4701 if (rc < 0) {
4702 LOGE("set_parms for unconfigure failed");
4703 pthread_mutex_unlock(&mMutex);
4704 return rc;
4705 }
4706 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004707 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004708 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004709 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004710 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004711 property_get("persist.camera.is_type", is_type_value, "4");
4712 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4713 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4714 property_get("persist.camera.is_type_preview", is_type_value, "4");
4715 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4716 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004717
4718 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4719 int32_t hal_version = CAM_HAL_V3;
4720 uint8_t captureIntent =
4721 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4722 mCaptureIntent = captureIntent;
4723 clear_metadata_buffer(mParameters);
4724 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4725 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4726 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004727 if (mFirstConfiguration) {
4728 // configure instant AEC
4729 // Instant AEC is a session based parameter and it is needed only
4730 // once per complete session after open camera.
4731 // i.e. This is set only once for the first capture request, after open camera.
4732 setInstantAEC(meta);
4733 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004734 uint8_t fwkVideoStabMode=0;
4735 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4736 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4737 }
4738
4739        // Enable EIS for video/preview only if the EIS setprop is enabled and the
4740        // first capture request's settings have video stabilization enabled
4741 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4742 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004743 int32_t vsMode;
4744 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4746 rc = BAD_VALUE;
4747 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004748 LOGD("setEis %d", setEis);
4749 bool eis3Supported = false;
4750 size_t count = IS_TYPE_MAX;
4751 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4752 for (size_t i = 0; i < count; i++) {
4753 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4754 eis3Supported = true;
4755 break;
4756 }
4757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004758
4759 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004760 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004761 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4762 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004763 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4764 is_type = isTypePreview;
4765 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4766 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4767 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004768 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004769 } else {
4770 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004772 } else {
4773 is_type = IS_TYPE_NONE;
4774 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004775 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004776 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4778 }
4779 }
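        // Net effect of the loop above: with setEis true, preview streams take the
        // preview IS type, video streams take the video IS type (downgraded from
        // EIS 3.0 to EIS 2.0 when 3.0 is not in the supported list), and every
        // other stream stays at IS_TYPE_NONE; with setEis false all streams are
        // forced to IS_TYPE_NONE.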
4780
4781 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4782 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4783
Thierry Strudel54dc9782017-02-15 12:12:10 -08004784 //Disable tintless only if the property is set to 0
4785 memset(prop, 0, sizeof(prop));
4786 property_get("persist.camera.tintless.enable", prop, "1");
4787 int32_t tintless_value = atoi(prop);
4788
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4790 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004791
Thierry Strudel3d639192016-09-09 11:52:26 -07004792 //Disable CDS for HFR mode or if DIS/EIS is on.
4793 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4794 //after every configure_stream
4795 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4796 (m_bIsVideo)) {
4797 int32_t cds = CAM_CDS_MODE_OFF;
4798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4799 CAM_INTF_PARM_CDS_MODE, cds))
4800 LOGE("Failed to disable CDS for HFR mode");
4801
4802 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004803
4804 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4805 uint8_t* use_av_timer = NULL;
4806
4807 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004808 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004809 use_av_timer = &m_debug_avtimer;
4810 }
4811 else{
4812 use_av_timer =
4813 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004814 if (use_av_timer) {
4815 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4816 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 }
4818
4819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4820 rc = BAD_VALUE;
4821 }
4822 }
4823
Thierry Strudel3d639192016-09-09 11:52:26 -07004824 setMobicat();
4825
4826 /* Set fps and hfr mode while sending meta stream info so that sensor
4827 * can configure appropriate streaming mode */
4828 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004829 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4830 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4832 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004833 if (rc == NO_ERROR) {
4834 int32_t max_fps =
4835 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004836 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4838 }
4839 /* For HFR, more buffers are dequeued upfront to improve the performance */
4840 if (mBatchSize) {
4841 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4842 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4843 }
4844 }
4845 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 LOGE("setHalFpsRange failed");
4847 }
4848 }
4849 if (meta.exists(ANDROID_CONTROL_MODE)) {
4850 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4851 rc = extractSceneMode(meta, metaMode, mParameters);
4852 if (rc != NO_ERROR) {
4853 LOGE("extractSceneMode failed");
4854 }
4855 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004856 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004857
Thierry Strudel04e026f2016-10-10 11:27:36 -07004858 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4859 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4860 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4861 rc = setVideoHdrMode(mParameters, vhdr);
4862 if (rc != NO_ERROR) {
4863 LOGE("setVideoHDR is failed");
4864 }
4865 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004866
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 //TODO: validate the arguments, HSV scenemode should have only the
4868 //advertised fps ranges
4869
4870 /*set the capture intent, hal version, tintless, stream info,
4871         *and DIS enable parameters to the backend*/
4872 LOGD("set_parms META_STREAM_INFO " );
4873 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004874 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4875 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004876 mStreamConfigInfo.type[i],
4877 mStreamConfigInfo.stream_sizes[i].width,
4878 mStreamConfigInfo.stream_sizes[i].height,
4879 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 mStreamConfigInfo.format[i],
4881 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4885 mParameters);
4886 if (rc < 0) {
4887 LOGE("set_parms failed for hal version, stream info");
4888 }
4889
Chien-Yu Chenee335912017-02-09 17:53:20 -08004890 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4891 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 if (rc != NO_ERROR) {
4893 LOGE("Failed to get sensor output size");
4894 pthread_mutex_unlock(&mMutex);
4895 goto error_exit;
4896 }
4897
4898 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4899 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004900 mSensorModeInfo.active_array_size.width,
4901 mSensorModeInfo.active_array_size.height);
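        // The mapper is reseeded with the full active array size and the active
        // array size of the selected sensor mode; presumably this lets crop regions
        // and ROIs given in framework (full-array) coordinates be scaled to the
        // current sensor output (an inference from this call, not confirmed
        // elsewhere in this file).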
Thierry Strudel3d639192016-09-09 11:52:26 -07004902
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004903 if (gHdrPlusClient != nullptr) {
4904 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4905 if (rc != OK) {
4906 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4907 mCameraId, mSensorModeInfo.op_pixel_clk);
4908 pthread_mutex_unlock(&mMutex);
4909 goto error_exit;
4910 }
4911 }
4912
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 /* Set batchmode before initializing channel. Since registerBuffer
4914 * internally initializes some of the channels, better set batchmode
4915 * even before first register buffer */
4916 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4917 it != mStreamInfo.end(); it++) {
4918 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4919 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4920 && mBatchSize) {
4921 rc = channel->setBatchSize(mBatchSize);
4922 //Disable per frame map unmap for HFR/batchmode case
4923 rc |= channel->setPerFrameMapUnmap(false);
4924 if (NO_ERROR != rc) {
4925 LOGE("Channel init failed %d", rc);
4926 pthread_mutex_unlock(&mMutex);
4927 goto error_exit;
4928 }
4929 }
4930 }
4931
4932 //First initialize all streams
4933 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4934 it != mStreamInfo.end(); it++) {
4935 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4936 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4937 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938 setEis) {
4939 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4940 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4941 is_type = mStreamConfigInfo.is_type[i];
4942 break;
4943 }
4944 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004945 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 rc = channel->initialize(IS_TYPE_NONE);
4948 }
4949 if (NO_ERROR != rc) {
4950 LOGE("Channel initialization failed %d", rc);
4951 pthread_mutex_unlock(&mMutex);
4952 goto error_exit;
4953 }
4954 }
4955
4956 if (mRawDumpChannel) {
4957 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4958 if (rc != NO_ERROR) {
4959 LOGE("Error: Raw Dump Channel init failed");
4960 pthread_mutex_unlock(&mMutex);
4961 goto error_exit;
4962 }
4963 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004964 if (mHdrPlusRawSrcChannel) {
4965 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4966 if (rc != NO_ERROR) {
4967 LOGE("Error: HDR+ RAW Source Channel init failed");
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
4971 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004972 if (mSupportChannel) {
4973 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4974 if (rc < 0) {
4975 LOGE("Support channel initialization failed");
4976 pthread_mutex_unlock(&mMutex);
4977 goto error_exit;
4978 }
4979 }
4980 if (mAnalysisChannel) {
4981 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4982 if (rc < 0) {
4983 LOGE("Analysis channel initialization failed");
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
4988 if (mDummyBatchChannel) {
4989 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4990 if (rc < 0) {
4991 LOGE("mDummyBatchChannel setBatchSize failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004995 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 if (rc < 0) {
4997 LOGE("mDummyBatchChannel initialization failed");
4998 pthread_mutex_unlock(&mMutex);
4999 goto error_exit;
5000 }
5001 }
5002
5003 // Set bundle info
5004 rc = setBundleInfo();
5005 if (rc < 0) {
5006 LOGE("setBundleInfo failed %d", rc);
5007 pthread_mutex_unlock(&mMutex);
5008 goto error_exit;
5009 }
5010
5011 //update settings from app here
5012 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5013 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5014 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5015 }
5016 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5017 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5018 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5019 }
5020 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5021 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5022 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5023
5024 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5025 (mLinkedCameraId != mCameraId) ) {
5026 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5027 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005028 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 goto error_exit;
5030 }
5031 }
5032
5033 // add bundle related cameras
5034 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5035 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005036 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5037 &m_pDualCamCmdPtr->bundle_info;
5038 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 if (mIsDeviceLinked)
5040 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5041 else
5042 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5043
5044 pthread_mutex_lock(&gCamLock);
5045
5046 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5047 LOGE("Dualcam: Invalid Session Id ");
5048 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005050 goto error_exit;
5051 }
5052
5053 if (mIsMainCamera == 1) {
5054 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5055 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005056 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005057 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 // related session id should be session id of linked session
5059 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5060 } else {
5061 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5062 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005063 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005064 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005065 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5066 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005067 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005068 pthread_mutex_unlock(&gCamLock);
5069
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005070 rc = mCameraHandle->ops->set_dual_cam_cmd(
5071 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 if (rc < 0) {
5073 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005074 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005075 goto error_exit;
5076 }
5077 }
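        // Summary of the dual-camera bundling above: sync control is ON/OFF per
        // QCAMERA3_DUALCAM_LINK_ENABLE, the main camera is tagged CAM_MODE_PRIMARY /
        // CAM_TYPE_MAIN / CAM_ROLE_BAYER and the linked camera CAM_MODE_SECONDARY /
        // CAM_TYPE_AUX / CAM_ROLE_MONO, both use CAM_3A_SYNC_FOLLOW, and the related
        // session id points at the linked camera's session before set_dual_cam_cmd()
        // is issued.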
5078
5079 //Then start them.
5080 LOGH("Start META Channel");
5081 rc = mMetadataChannel->start();
5082 if (rc < 0) {
5083 LOGE("META channel start failed");
5084 pthread_mutex_unlock(&mMutex);
5085 goto error_exit;
5086 }
5087
5088 if (mAnalysisChannel) {
5089 rc = mAnalysisChannel->start();
5090 if (rc < 0) {
5091 LOGE("Analysis channel start failed");
5092 mMetadataChannel->stop();
5093 pthread_mutex_unlock(&mMutex);
5094 goto error_exit;
5095 }
5096 }
5097
5098 if (mSupportChannel) {
5099 rc = mSupportChannel->start();
5100 if (rc < 0) {
5101 LOGE("Support channel start failed");
5102 mMetadataChannel->stop();
5103 /* Although support and analysis are mutually exclusive today
5104                    adding it in any case for future proofing */
5105 if (mAnalysisChannel) {
5106 mAnalysisChannel->stop();
5107 }
5108 pthread_mutex_unlock(&mMutex);
5109 goto error_exit;
5110 }
5111 }
5112 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5113 it != mStreamInfo.end(); it++) {
5114 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5115 LOGH("Start Processing Channel mask=%d",
5116 channel->getStreamTypeMask());
5117 rc = channel->start();
5118 if (rc < 0) {
5119 LOGE("channel start failed");
5120 pthread_mutex_unlock(&mMutex);
5121 goto error_exit;
5122 }
5123 }
5124
5125 if (mRawDumpChannel) {
5126 LOGD("Starting raw dump stream");
5127 rc = mRawDumpChannel->start();
5128 if (rc != NO_ERROR) {
5129 LOGE("Error Starting Raw Dump Channel");
5130 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5131 it != mStreamInfo.end(); it++) {
5132 QCamera3Channel *channel =
5133 (QCamera3Channel *)(*it)->stream->priv;
5134 LOGH("Stopping Processing Channel mask=%d",
5135 channel->getStreamTypeMask());
5136 channel->stop();
5137 }
5138 if (mSupportChannel)
5139 mSupportChannel->stop();
5140 if (mAnalysisChannel) {
5141 mAnalysisChannel->stop();
5142 }
5143 mMetadataChannel->stop();
5144 pthread_mutex_unlock(&mMutex);
5145 goto error_exit;
5146 }
5147 }
5148
5149 if (mChannelHandle) {
5150
5151 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5152 mChannelHandle);
5153 if (rc != NO_ERROR) {
5154 LOGE("start_channel failed %d", rc);
5155 pthread_mutex_unlock(&mMutex);
5156 goto error_exit;
5157 }
5158 }
5159
5160 goto no_error;
5161error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005162 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 return rc;
5164no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 mWokenUpByDaemon = false;
5166 mPendingLiveRequest = 0;
5167 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 }
5169
Chien-Yu Chenee335912017-02-09 17:53:20 -08005170 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005171 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005172 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5173 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5174 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5175 rc = enableHdrPlusModeLocked();
5176 if (rc != OK) {
5177 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5178 pthread_mutex_unlock(&mMutex);
5179 return rc;
5180 }
5181
5182 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5183 if (mHdrPlusRawSrcChannel) {
5184 rc = mHdrPlusRawSrcChannel->start();
5185 if (rc != OK) {
5186 LOGE("Error Starting HDR+ RAW Channel");
5187 pthread_mutex_unlock(&mMutex);
5188 return rc;
5189 }
5190 }
5191 mFirstPreviewIntentSeen = true;
5192 }
5193
Thierry Strudel3d639192016-09-09 11:52:26 -07005194 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005195 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005196
5197 if (mFlushPerf) {
5198 //we cannot accept any requests during flush
5199 LOGE("process_capture_request cannot proceed during flush");
5200 pthread_mutex_unlock(&mMutex);
5201 return NO_ERROR; //should return an error
5202 }
5203
5204 if (meta.exists(ANDROID_REQUEST_ID)) {
5205 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5206 mCurrentRequestId = request_id;
5207 LOGD("Received request with id: %d", request_id);
5208 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5209 LOGE("Unable to find request id field, \
5210 & no previous id available");
5211 pthread_mutex_unlock(&mMutex);
5212 return NAME_NOT_FOUND;
5213 } else {
5214 LOGD("Re-using old request id");
5215 request_id = mCurrentRequestId;
5216 }
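    // Request id handling above: an id present in ANDROID_REQUEST_ID is cached in
    // mCurrentRequestId and reused for later requests that omit it; only a request
    // issued right after configuration, or before any id has been seen, must carry
    // its own.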
5217
5218 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5219 request->num_output_buffers,
5220 request->input_buffer,
5221 frameNumber);
5222 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005223 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005225 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 uint32_t snapshotStreamId = 0;
5227 for (size_t i = 0; i < request->num_output_buffers; i++) {
5228 const camera3_stream_buffer_t& output = request->output_buffers[i];
5229 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5230
Emilian Peev7650c122017-01-19 08:24:33 -08005231 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5232 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005233 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 blob_request = 1;
5235 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5236 }
5237
5238 if (output.acquire_fence != -1) {
5239 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5240 close(output.acquire_fence);
5241 if (rc != OK) {
5242 LOGE("sync wait failed %d", rc);
5243 pthread_mutex_unlock(&mMutex);
5244 return rc;
5245 }
5246 }
5247
Emilian Peev0f3c3162017-03-15 12:57:46 +00005248 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5249 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005250 depthRequestPresent = true;
5251 continue;
5252 }
5253
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005256
5257 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5258 isVidBufRequested = true;
5259 }
5260 }
5261
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005262    //FIXME: Add checks to ensure there are no dups in validateCaptureRequest
5263 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5264 itr++) {
5265 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5266 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5267 channel->getStreamID(channel->getStreamTypeMask());
5268
5269 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5270 isVidBufRequested = true;
5271 }
5272 }
5273
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005275 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005276 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005277 }
5278 if (blob_request && mRawDumpChannel) {
5279 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005280 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005282 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 }
5284
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005285 {
5286 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5287 // Request a RAW buffer if
5288 // 1. mHdrPlusRawSrcChannel is valid.
5289 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5290 // 3. There is no pending HDR+ request.
5291 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5292 mHdrPlusPendingRequests.size() == 0) {
5293 streamsArray.stream_request[streamsArray.num_streams].streamID =
5294 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5295 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5296 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005297 }
5298
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005299 //extract capture intent
5300 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5301 mCaptureIntent =
5302 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5303 }
5304
5305 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5306 mCacMode =
5307 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5308 }
5309
5310 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005311 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005312
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005313 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005314 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005315 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5316 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005317 }
5318
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005319 if (hdrPlusRequest) {
5320 // For a HDR+ request, just set the frame parameters.
5321 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5322 if (rc < 0) {
5323 LOGE("fail to set frame parameters");
5324 pthread_mutex_unlock(&mMutex);
5325 return rc;
5326 }
5327 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005328 /* Parse the settings:
5329 * - For every request in NORMAL MODE
5330 * - For every request in HFR mode during preview only case
5331 * - For first request of every batch in HFR mode during video
5332 * recording. In batchmode the same settings except frame number is
5333 * repeated in each request of the batch.
5334 */
5335 if (!mBatchSize ||
5336 (mBatchSize && !isVidBufRequested) ||
5337 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005338 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005339 if (rc < 0) {
5340 LOGE("fail to set frame parameters");
5341 pthread_mutex_unlock(&mMutex);
5342 return rc;
5343 }
5344 }
5345 /* For batchMode HFR, setFrameParameters is not called for every
5346         * request; only the frame number of the latest request is parsed.
5347         * Keep track of first and last frame numbers in a batch so that
5348         * metadata for all frame numbers of the batch can be duplicated in
5349         * handleBatchMetadata */
5350 if (mBatchSize) {
5351 if (!mToBeQueuedVidBufs) {
5352 //start of the batch
5353 mFirstFrameNumberInBatch = request->frame_number;
5354 }
5355 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5356 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5357 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005358 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005359 return BAD_VALUE;
5360 }
5361 }
5362 if (mNeedSensorRestart) {
5363 /* Unlock the mutex as restartSensor waits on the channels to be
5364 * stopped, which in turn calls stream callback functions -
5365 * handleBufferWithLock and handleMetadataWithLock */
5366 pthread_mutex_unlock(&mMutex);
5367 rc = dynamicUpdateMetaStreamInfo();
5368 if (rc != NO_ERROR) {
5369 LOGE("Restarting the sensor failed");
5370 return BAD_VALUE;
5371 }
5372 mNeedSensorRestart = false;
5373 pthread_mutex_lock(&mMutex);
5374 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005375 if(mResetInstantAEC) {
5376 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5377 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5378 mResetInstantAEC = false;
5379 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005380 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 if (request->input_buffer->acquire_fence != -1) {
5382 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5383 close(request->input_buffer->acquire_fence);
5384 if (rc != OK) {
5385 LOGE("input buffer sync wait failed %d", rc);
5386 pthread_mutex_unlock(&mMutex);
5387 return rc;
5388 }
5389 }
5390 }
5391
5392 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5393 mLastCustIntentFrmNum = frameNumber;
5394 }
5395 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005396 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 pendingRequestIterator latestRequest;
5398 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005399 pendingRequest.num_buffers = depthRequestPresent ?
5400 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 pendingRequest.request_id = request_id;
5402 pendingRequest.blob_request = blob_request;
5403 pendingRequest.timestamp = 0;
5404 pendingRequest.bUrgentReceived = 0;
5405 if (request->input_buffer) {
5406 pendingRequest.input_buffer =
5407 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5408 *(pendingRequest.input_buffer) = *(request->input_buffer);
5409 pInputBuffer = pendingRequest.input_buffer;
5410 } else {
5411 pendingRequest.input_buffer = NULL;
5412 pInputBuffer = NULL;
5413 }
5414
5415 pendingRequest.pipeline_depth = 0;
5416 pendingRequest.partial_result_cnt = 0;
5417 extractJpegMetadata(mCurJpegMeta, request);
5418 pendingRequest.jpegMetadata = mCurJpegMeta;
5419 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5420 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005422 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5423 mHybridAeEnable =
5424 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5425 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005426
5427 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5428 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005429 /* DevCamDebug metadata processCaptureRequest */
5430 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5431 mDevCamDebugMetaEnable =
5432 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5433 }
5434 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5435 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005436
5437 //extract CAC info
5438 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5439 mCacMode =
5440 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5441 }
5442 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005443 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005444
5445 PendingBuffersInRequest bufsForCurRequest;
5446 bufsForCurRequest.frame_number = frameNumber;
5447 // Mark current timestamp for the new request
5448 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005449 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005450
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005451 if (hdrPlusRequest) {
5452 // Save settings for this request.
5453 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5454 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5455
5456 // Add to pending HDR+ request queue.
5457 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5458 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5459
5460 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5461 }
5462
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 for (size_t i = 0; i < request->num_output_buffers; i++) {
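        // Depth (blob) buffers are serviced by the dedicated depth channel and
        // are not tracked in the regular pending buffer list, so skip them here.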
Emilian Peev0f3c3162017-03-15 12:57:46 +00005464 if ((request->output_buffers[i].stream->data_space ==
5465 HAL_DATASPACE_DEPTH) &&
5466 (HAL_PIXEL_FORMAT_BLOB ==
5467 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005468 continue;
5469 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005470 RequestedBufferInfo requestedBuf;
5471 memset(&requestedBuf, 0, sizeof(requestedBuf));
5472 requestedBuf.stream = request->output_buffers[i].stream;
5473 requestedBuf.buffer = NULL;
5474 pendingRequest.buffers.push_back(requestedBuf);
5475
5476        // Add the buffer handle to the pending buffers list
5477 PendingBufferInfo bufferInfo;
5478 bufferInfo.buffer = request->output_buffers[i].buffer;
5479 bufferInfo.stream = request->output_buffers[i].stream;
5480 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5481 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5482 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5483 frameNumber, bufferInfo.buffer,
5484 channel->getStreamTypeMask(), bufferInfo.stream->format);
5485 }
5486 // Add this request packet into mPendingBuffersMap
5487 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5488 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5489 mPendingBuffersMap.get_num_overall_buffers());
5490
5491 latestRequest = mPendingRequestsList.insert(
5492 mPendingRequestsList.end(), pendingRequest);
5493 if(mFlush) {
5494 LOGI("mFlush is true");
5495 pthread_mutex_unlock(&mMutex);
5496 return NO_ERROR;
5497 }
5498
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005499 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5500 // channel.
5501 if (!hdrPlusRequest) {
5502 int indexUsed;
5503        // Notify the metadata channel that we received a request
5504 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005506 if(request->input_buffer != NULL){
5507 LOGD("Input request, frame_number %d", frameNumber);
5508 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5509 if (NO_ERROR != rc) {
5510 LOGE("fail to set reproc parameters");
5511 pthread_mutex_unlock(&mMutex);
5512 return rc;
5513 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005514 }
5515
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516 // Call request on other streams
5517 uint32_t streams_need_metadata = 0;
5518 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
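        // Issue a request on each output buffer's channel. The buffer index the
        // channel returns is recorded in streamsArray so the backend can match
        // buffers to streams; in constrained high-speed mode the streams run in
        // free-run mode (CAM_FREERUN_IDX) instead.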
5519 for (size_t i = 0; i < request->num_output_buffers; i++) {
5520 const camera3_stream_buffer_t& output = request->output_buffers[i];
5521 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5522
5523 if (channel == NULL) {
5524 LOGW("invalid channel pointer for stream");
5525 continue;
5526 }
5527
5528 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5529 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5530 output.buffer, request->input_buffer, frameNumber);
5531 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005532 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005533 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5534 if (rc < 0) {
5535 LOGE("Fail to request on picture channel");
5536 pthread_mutex_unlock(&mMutex);
5537 return rc;
5538 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005539 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005540 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5541 assert(NULL != mDepthChannel);
5542 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005543
Emilian Peev7650c122017-01-19 08:24:33 -08005544 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5545 if (rc < 0) {
5546 LOGE("Fail to map on depth buffer");
5547 pthread_mutex_unlock(&mMutex);
5548 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005549 }
Emilian Peev7650c122017-01-19 08:24:33 -08005550 } else {
5551 LOGD("snapshot request with buffer %p, frame_number %d",
5552 output.buffer, frameNumber);
5553 if (!request->settings) {
5554 rc = channel->request(output.buffer, frameNumber,
5555 NULL, mPrevParameters, indexUsed);
5556 } else {
5557 rc = channel->request(output.buffer, frameNumber,
5558 NULL, mParameters, indexUsed);
5559 }
5560 if (rc < 0) {
5561 LOGE("Fail to request on picture channel");
5562 pthread_mutex_unlock(&mMutex);
5563 return rc;
5564 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565
Emilian Peev7650c122017-01-19 08:24:33 -08005566 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5567 uint32_t j = 0;
5568 for (j = 0; j < streamsArray.num_streams; j++) {
5569 if (streamsArray.stream_request[j].streamID == streamId) {
5570 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5571 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5572 else
5573 streamsArray.stream_request[j].buf_index = indexUsed;
5574 break;
5575 }
5576 }
5577 if (j == streamsArray.num_streams) {
5578 LOGE("Did not find matching stream to update index");
5579 assert(0);
5580 }
5581
5582 pendingBufferIter->need_metadata = true;
5583 streams_need_metadata++;
5584 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005585 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5587 bool needMetadata = false;
5588 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5589 rc = yuvChannel->request(output.buffer, frameNumber,
5590 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5591 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005592 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005593 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005594 pthread_mutex_unlock(&mMutex);
5595 return rc;
5596 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005597
5598 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5599 uint32_t j = 0;
5600 for (j = 0; j < streamsArray.num_streams; j++) {
5601 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005602 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5603 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5604 else
5605 streamsArray.stream_request[j].buf_index = indexUsed;
5606 break;
5607 }
5608 }
5609 if (j == streamsArray.num_streams) {
5610 LOGE("Did not find matching stream to update index");
5611 assert(0);
5612 }
5613
5614 pendingBufferIter->need_metadata = needMetadata;
5615 if (needMetadata)
5616 streams_need_metadata += 1;
5617 LOGD("calling YUV channel request, need_metadata is %d",
5618 needMetadata);
5619 } else {
5620 LOGD("request with buffer %p, frame_number %d",
5621 output.buffer, frameNumber);
5622
5623 rc = channel->request(output.buffer, frameNumber, indexUsed);
5624
5625 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5626 uint32_t j = 0;
5627 for (j = 0; j < streamsArray.num_streams; j++) {
5628 if (streamsArray.stream_request[j].streamID == streamId) {
5629 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5630 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5631 else
5632 streamsArray.stream_request[j].buf_index = indexUsed;
5633 break;
5634 }
5635 }
5636 if (j == streamsArray.num_streams) {
5637 LOGE("Did not find matching stream to update index");
5638 assert(0);
5639 }
5640
5641 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5642 && mBatchSize) {
5643 mToBeQueuedVidBufs++;
5644 if (mToBeQueuedVidBufs == mBatchSize) {
5645 channel->queueBatchBuf();
5646 }
5647 }
5648 if (rc < 0) {
5649 LOGE("request failed");
5650 pthread_mutex_unlock(&mMutex);
5651 return rc;
5652 }
5653 }
5654 pendingBufferIter++;
5655 }
5656
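        // Issue requests for streams the HAL requested internally (for example
        // metering-only captures). These carry no framework buffer, so
        // channel->request() is called with a NULL buffer.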
5657 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5658 itr++) {
5659 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5660
5661 if (channel == NULL) {
5662 LOGE("invalid channel pointer for stream");
5663 assert(0);
5664 return BAD_VALUE;
5665 }
5666
5667 InternalRequest requestedStream;
5668 requestedStream = (*itr);
5669
5670
5671 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5672 LOGD("snapshot request internally input buffer %p, frame_number %d",
5673 request->input_buffer, frameNumber);
5674 if(request->input_buffer != NULL){
5675 rc = channel->request(NULL, frameNumber,
5676 pInputBuffer, &mReprocMeta, indexUsed, true,
5677 requestedStream.meteringOnly);
5678 if (rc < 0) {
5679 LOGE("Fail to request on picture channel");
5680 pthread_mutex_unlock(&mMutex);
5681 return rc;
5682 }
5683 } else {
5684 LOGD("snapshot request with frame_number %d", frameNumber);
5685 if (!request->settings) {
5686 rc = channel->request(NULL, frameNumber,
5687 NULL, mPrevParameters, indexUsed, true,
5688 requestedStream.meteringOnly);
5689 } else {
5690 rc = channel->request(NULL, frameNumber,
5691 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5692 }
5693 if (rc < 0) {
5694 LOGE("Fail to request on picture channel");
5695 pthread_mutex_unlock(&mMutex);
5696 return rc;
5697 }
5698
5699 if ((*itr).meteringOnly != 1) {
5700 requestedStream.need_metadata = 1;
5701 streams_need_metadata++;
5702 }
5703 }
5704
5705 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5706 uint32_t j = 0;
5707 for (j = 0; j < streamsArray.num_streams; j++) {
5708 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005709 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5710 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5711 else
5712 streamsArray.stream_request[j].buf_index = indexUsed;
5713 break;
5714 }
5715 }
5716 if (j == streamsArray.num_streams) {
5717 LOGE("Did not find matching stream to update index");
5718 assert(0);
5719 }
5720
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005721 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005722 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005723 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005724 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005725 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005726 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005727 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005728
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005729 //If 2 streams have need_metadata set to true, fail the request, unless
5730 //we copy/reference count the metadata buffer
5731 if (streams_need_metadata > 1) {
5732 LOGE("not supporting request in which two streams requires"
5733 " 2 HAL metadata for reprocessing");
5734 pthread_mutex_unlock(&mMutex);
5735 return -EINVAL;
5736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005737
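        // Enable PDAF data in the backend only when this request contains a
        // depth stream, since the depth output is produced from the PDAF data.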
Emilian Peev7650c122017-01-19 08:24:33 -08005738 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5740 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5741 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5742 pthread_mutex_unlock(&mMutex);
5743 return BAD_VALUE;
5744 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005745 if (request->input_buffer == NULL) {
5746 /* Set the parameters to backend:
5747 * - For every request in NORMAL MODE
5748 * - For every request in HFR mode during preview only case
5749 * - Once every batch in HFR mode during video recording
5750 */
5751 if (!mBatchSize ||
5752 (mBatchSize && !isVidBufRequested) ||
5753 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5754 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5755 mBatchSize, isVidBufRequested,
5756 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005757
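            // When the batch is complete, merge the stream IDs collected across
            // the batch into mBatchedStreamsArray (each ID added once) so a
            // single set_parms call covers every stream touched by the batch.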
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5759 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5760 uint32_t m = 0;
5761 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5762 if (streamsArray.stream_request[k].streamID ==
5763 mBatchedStreamsArray.stream_request[m].streamID)
5764 break;
5765 }
5766 if (m == mBatchedStreamsArray.num_streams) {
5767 mBatchedStreamsArray.stream_request\
5768 [mBatchedStreamsArray.num_streams].streamID =
5769 streamsArray.stream_request[k].streamID;
5770 mBatchedStreamsArray.stream_request\
5771 [mBatchedStreamsArray.num_streams].buf_index =
5772 streamsArray.stream_request[k].buf_index;
5773 mBatchedStreamsArray.num_streams =
5774 mBatchedStreamsArray.num_streams + 1;
5775 }
5776 }
5777 streamsArray = mBatchedStreamsArray;
5778 }
5779 /* Update stream id of all the requested buffers */
5780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5781 streamsArray)) {
5782 LOGE("Failed to set stream type mask in the parameters");
5783 return BAD_VALUE;
5784 }
5785
5786 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5787 mParameters);
5788 if (rc < 0) {
5789 LOGE("set_parms failed");
5790 }
5791            /* reset to zero because the batch is queued */
5792 mToBeQueuedVidBufs = 0;
5793 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5794 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5795 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005796 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5797 uint32_t m = 0;
5798 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5799 if (streamsArray.stream_request[k].streamID ==
5800 mBatchedStreamsArray.stream_request[m].streamID)
5801 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 }
5803 if (m == mBatchedStreamsArray.num_streams) {
5804 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5805 streamID = streamsArray.stream_request[k].streamID;
5806 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5807 buf_index = streamsArray.stream_request[k].buf_index;
5808 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5809 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005810 }
5811 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005812 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005813 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005814 }
5815
5816 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5817
5818 mState = STARTED;
5819 // Added a timed condition wait
5820 struct timespec ts;
5821 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005822 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005823 if (rc < 0) {
5824 isValidTimeout = 0;
5825 LOGE("Error reading the real time clock!!");
5826 }
5827 else {
5828        // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005829 int64_t timeout = 5;
5830 {
5831 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5832 // If there is a pending HDR+ request, the following requests may be blocked until the
5833 // HDR+ request is done. So allow a longer timeout.
5834 if (mHdrPlusPendingRequests.size() > 0) {
5835 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5836 }
5837 }
5838 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005839 }
5840    //Block on the condition variable until in-flight requests drop below the minimum
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005841 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005842 (mState != ERROR) && (mState != DEINIT)) {
5843 if (!isValidTimeout) {
5844 LOGD("Blocking on conditional wait");
5845 pthread_cond_wait(&mRequestCond, &mMutex);
5846 }
5847 else {
5848 LOGD("Blocking on timed conditional wait");
5849 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5850 if (rc == ETIMEDOUT) {
5851 rc = -ENODEV;
5852 LOGE("Unblocked on timeout!!!!");
5853 break;
5854 }
5855 }
5856 LOGD("Unblocked");
5857 if (mWokenUpByDaemon) {
5858 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005859 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005860 break;
5861 }
5862 }
5863 pthread_mutex_unlock(&mMutex);
5864
5865 return rc;
5866}
5867
5868/*===========================================================================
5869 * FUNCTION : dump
5870 *
5871 * DESCRIPTION: Dump pending requests, pending buffers and the pending
5872 *              frame drop list to the given file descriptor
5873 * PARAMETERS :
5874 *   @fd : file descriptor to write the dump into
5875 *
5876 * RETURN     : None
5877 *==========================================================================*/
5878void QCamera3HardwareInterface::dump(int fd)
5879{
5880 pthread_mutex_lock(&mMutex);
5881 dprintf(fd, "\n Camera HAL3 information Begin \n");
5882
5883 dprintf(fd, "\nNumber of pending requests: %zu \n",
5884 mPendingRequestsList.size());
5885 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5886 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5887 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5888 for(pendingRequestIterator i = mPendingRequestsList.begin();
5889 i != mPendingRequestsList.end(); i++) {
5890 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5891 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5892 i->input_buffer);
5893 }
5894 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5895 mPendingBuffersMap.get_num_overall_buffers());
5896 dprintf(fd, "-------+------------------\n");
5897 dprintf(fd, " Frame | Stream type mask \n");
5898 dprintf(fd, "-------+------------------\n");
5899 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5900 for(auto &j : req.mPendingBufferList) {
5901 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5902 dprintf(fd, " %5d | %11d \n",
5903 req.frame_number, channel->getStreamTypeMask());
5904 }
5905 }
5906 dprintf(fd, "-------+------------------\n");
5907
5908 dprintf(fd, "\nPending frame drop list: %zu\n",
5909 mPendingFrameDropList.size());
5910 dprintf(fd, "-------+-----------\n");
5911 dprintf(fd, " Frame | Stream ID \n");
5912 dprintf(fd, "-------+-----------\n");
5913 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5914 i != mPendingFrameDropList.end(); i++) {
5915 dprintf(fd, " %5d | %9d \n",
5916 i->frame_number, i->stream_ID);
5917 }
5918 dprintf(fd, "-------+-----------\n");
5919
5920 dprintf(fd, "\n Camera HAL3 information End \n");
5921
5922 /* use dumpsys media.camera as trigger to send update debug level event */
5923 mUpdateDebugLevel = true;
5924 pthread_mutex_unlock(&mMutex);
5925 return;
5926}
5927
5928/*===========================================================================
5929 * FUNCTION : flush
5930 *
5931 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5932 * conditionally restarts channels
5933 *
5934 * PARAMETERS :
5935 * @ restartChannels: re-start all channels
5936 *
5937 *
5938 * RETURN :
5939 * 0 on success
5940 * Error code on failure
5941 *==========================================================================*/
5942int QCamera3HardwareInterface::flush(bool restartChannels)
5943{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005944 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005945 int32_t rc = NO_ERROR;
5946
5947 LOGD("Unblocking Process Capture Request");
5948 pthread_mutex_lock(&mMutex);
5949 mFlush = true;
5950 pthread_mutex_unlock(&mMutex);
5951
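    // Flush sequence: stop all channels, unlink any dual-camera bundle, reset
    // bundle info, return errors for all pending requests, and optionally
    // restart the channels.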
5952 rc = stopAllChannels();
5953    // Unlink the dual camera bundle, if the devices were linked
5954 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005955 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5956 &m_pDualCamCmdPtr->bundle_info;
5957 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005958 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5959 pthread_mutex_lock(&gCamLock);
5960
5961 if (mIsMainCamera == 1) {
5962 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5963 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005964 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005965 // related session id should be session id of linked session
5966 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5967 } else {
5968 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5969 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005970 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005971 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5972 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005973 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005974 pthread_mutex_unlock(&gCamLock);
5975
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005976 rc = mCameraHandle->ops->set_dual_cam_cmd(
5977 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005978 if (rc < 0) {
5979 LOGE("Dualcam: Unlink failed, but still proceed to close");
5980 }
5981 }
5982
5983 if (rc < 0) {
5984 LOGE("stopAllChannels failed");
5985 return rc;
5986 }
5987 if (mChannelHandle) {
5988 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5989 mChannelHandle);
5990 }
5991
5992 // Reset bundle info
5993 rc = setBundleInfo();
5994 if (rc < 0) {
5995 LOGE("setBundleInfo failed %d", rc);
5996 return rc;
5997 }
5998
5999 // Mutex Lock
6000 pthread_mutex_lock(&mMutex);
6001
6002 // Unblock process_capture_request
6003 mPendingLiveRequest = 0;
6004 pthread_cond_signal(&mRequestCond);
6005
6006 rc = notifyErrorForPendingRequests();
6007 if (rc < 0) {
6008 LOGE("notifyErrorForPendingRequests failed");
6009 pthread_mutex_unlock(&mMutex);
6010 return rc;
6011 }
6012
6013 mFlush = false;
6014
6015 // Start the Streams/Channels
6016 if (restartChannels) {
6017 rc = startAllChannels();
6018 if (rc < 0) {
6019 LOGE("startAllChannels failed");
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006023 if (mChannelHandle) {
6024 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6025 mChannelHandle);
6026 if (rc < 0) {
6027 LOGE("start_channel failed");
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006031 }
6032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006033 pthread_mutex_unlock(&mMutex);
6034
6035 return 0;
6036}
6037
6038/*===========================================================================
6039 * FUNCTION : flushPerf
6040 *
6041 * DESCRIPTION: This is the performance-optimized version of flush that does
6042 *              not stop the streams, but rather flushes the pipeline
6043 *
6044 * PARAMETERS :
6045 *
6046 *
6047 * RETURN : 0 : success
6048 * -EINVAL: input is malformed (device is not valid)
6049 * -ENODEV: if the device has encountered a serious error
6050 *==========================================================================*/
6051int QCamera3HardwareInterface::flushPerf()
6052{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006053 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006054 int32_t rc = 0;
6055 struct timespec timeout;
6056 bool timed_wait = false;
6057
6058 pthread_mutex_lock(&mMutex);
6059 mFlushPerf = true;
6060 mPendingBuffersMap.numPendingBufsAtFlush =
6061 mPendingBuffersMap.get_num_overall_buffers();
6062 LOGD("Calling flush. Wait for %d buffers to return",
6063 mPendingBuffersMap.numPendingBufsAtFlush);
6064
6065 /* send the flush event to the backend */
6066 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6067 if (rc < 0) {
6068 LOGE("Error in flush: IOCTL failure");
6069 mFlushPerf = false;
6070 pthread_mutex_unlock(&mMutex);
6071 return -ENODEV;
6072 }
6073
6074 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6075 LOGD("No pending buffers in HAL, return flush");
6076 mFlushPerf = false;
6077 pthread_mutex_unlock(&mMutex);
6078 return rc;
6079 }
6080
6081 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006082 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006083 if (rc < 0) {
6084 LOGE("Error reading the real time clock, cannot use timed wait");
6085 } else {
6086 timeout.tv_sec += FLUSH_TIMEOUT;
6087 timed_wait = true;
6088 }
6089
6090    //Block on the condition variable until all pending buffers are returned
6091 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6092 LOGD("Waiting on mBuffersCond");
6093 if (!timed_wait) {
6094 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6095 if (rc != 0) {
6096 LOGE("pthread_cond_wait failed due to rc = %s",
6097 strerror(rc));
6098 break;
6099 }
6100 } else {
6101 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6102 if (rc != 0) {
6103 LOGE("pthread_cond_timedwait failed due to rc = %s",
6104 strerror(rc));
6105 break;
6106 }
6107 }
6108 }
6109 if (rc != 0) {
6110 mFlushPerf = false;
6111 pthread_mutex_unlock(&mMutex);
6112 return -ENODEV;
6113 }
6114
6115 LOGD("Received buffers, now safe to return them");
6116
6117 //make sure the channels handle flush
6118 //currently only required for the picture channel to release snapshot resources
6119 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6120 it != mStreamInfo.end(); it++) {
6121 QCamera3Channel *channel = (*it)->channel;
6122 if (channel) {
6123 rc = channel->flush();
6124 if (rc) {
6125 LOGE("Flushing the channels failed with error %d", rc);
6126                // Even though the channel flush failed, we need to continue and
6127                // return the buffers we have to the framework; however, the return
6128                // value will be an error
6129 rc = -ENODEV;
6130 }
6131 }
6132 }
6133
6134 /* notify the frameworks and send errored results */
6135 rc = notifyErrorForPendingRequests();
6136 if (rc < 0) {
6137 LOGE("notifyErrorForPendingRequests failed");
6138 pthread_mutex_unlock(&mMutex);
6139 return rc;
6140 }
6141
6142 //unblock process_capture_request
6143 mPendingLiveRequest = 0;
6144 unblockRequestIfNecessary();
6145
6146 mFlushPerf = false;
6147 pthread_mutex_unlock(&mMutex);
6148 LOGD ("Flush Operation complete. rc = %d", rc);
6149 return rc;
6150}
6151
6152/*===========================================================================
6153 * FUNCTION : handleCameraDeviceError
6154 *
6155 * DESCRIPTION: This function performs an internal flush, notifies the error
6156 *              to the framework and updates the state variable.
6157 *
6158 * PARAMETERS : None
6159 *
6160 * RETURN : NO_ERROR on Success
6161 * Error code on failure
6162 *==========================================================================*/
6163int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6164{
6165 int32_t rc = NO_ERROR;
6166
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006167 {
6168 Mutex::Autolock lock(mFlushLock);
6169 pthread_mutex_lock(&mMutex);
6170 if (mState != ERROR) {
6171 //if mState != ERROR, nothing to be done
6172 pthread_mutex_unlock(&mMutex);
6173 return NO_ERROR;
6174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006175 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006176
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006177 rc = flush(false /* restart channels */);
6178 if (NO_ERROR != rc) {
6179 LOGE("internal flush to handle mState = ERROR failed");
6180 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006181
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006182 pthread_mutex_lock(&mMutex);
6183 mState = DEINIT;
6184 pthread_mutex_unlock(&mMutex);
6185 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006186
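    // Notify the framework of a fatal device error; the framework is expected
    // to close the camera device after receiving CAMERA3_MSG_ERROR_DEVICE.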
6187 camera3_notify_msg_t notify_msg;
6188 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6189 notify_msg.type = CAMERA3_MSG_ERROR;
6190 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6191 notify_msg.message.error.error_stream = NULL;
6192 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006193 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006194
6195 return rc;
6196}
6197
6198/*===========================================================================
6199 * FUNCTION : captureResultCb
6200 *
6201 * DESCRIPTION: Callback handler for all capture results
6202 * (streams, as well as metadata)
6203 *
6204 * PARAMETERS :
6205 * @metadata : metadata information
6206 * @buffer : actual gralloc buffer to be returned to frameworks.
6207 * NULL if metadata.
6208 *
6209 * RETURN : NONE
6210 *==========================================================================*/
6211void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6212 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6213{
6214 if (metadata_buf) {
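        // Snapshot mBatchSize under the mutex; batched metadata is dispatched
        // without holding mMutex, while the non-batch path handles the metadata
        // with the lock held.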
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006215 pthread_mutex_lock(&mMutex);
6216 uint8_t batchSize = mBatchSize;
6217 pthread_mutex_unlock(&mMutex);
6218 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006219 handleBatchMetadata(metadata_buf,
6220 true /* free_and_bufdone_meta_buf */);
6221 } else { /* mBatchSize = 0 */
6222 hdrPlusPerfLock(metadata_buf);
6223 pthread_mutex_lock(&mMutex);
6224 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006225 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006226 true /* last urgent frame of batch metadata */,
6227 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006228 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006229 pthread_mutex_unlock(&mMutex);
6230 }
6231 } else if (isInputBuffer) {
6232 pthread_mutex_lock(&mMutex);
6233 handleInputBufferWithLock(frame_number);
6234 pthread_mutex_unlock(&mMutex);
6235 } else {
6236 pthread_mutex_lock(&mMutex);
6237 handleBufferWithLock(buffer, frame_number);
6238 pthread_mutex_unlock(&mMutex);
6239 }
6240 return;
6241}
6242
6243/*===========================================================================
6244 * FUNCTION : getReprocessibleOutputStreamId
6245 *
6246 * DESCRIPTION: Get source output stream id for the input reprocess stream
6247 * based on size and format, which would be the largest
6248 * output stream if an input stream exists.
6249 *
6250 * PARAMETERS :
6251 * @id : return the stream id if found
6252 *
6253 * RETURN : int32_t type of status
6254 * NO_ERROR -- success
6255 *              non-zero failure code
6256 *==========================================================================*/
6257int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6258{
6259 /* check if any output or bidirectional stream with the same size and format
6260 and return that stream */
6261 if ((mInputStreamInfo.dim.width > 0) &&
6262 (mInputStreamInfo.dim.height > 0)) {
6263 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6264 it != mStreamInfo.end(); it++) {
6265
6266 camera3_stream_t *stream = (*it)->stream;
6267 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6268 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6269 (stream->format == mInputStreamInfo.format)) {
6270 // Usage flag for an input stream and the source output stream
6271 // may be different.
6272 LOGD("Found reprocessible output stream! %p", *it);
6273 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6274 stream->usage, mInputStreamInfo.usage);
6275
6276 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6277 if (channel != NULL && channel->mStreams[0]) {
6278 id = channel->mStreams[0]->getMyServerID();
6279 return NO_ERROR;
6280 }
6281 }
6282 }
6283 } else {
6284 LOGD("No input stream, so no reprocessible output stream");
6285 }
6286 return NAME_NOT_FOUND;
6287}
6288
6289/*===========================================================================
6290 * FUNCTION : lookupFwkName
6291 *
6292 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6293 *              make sure the parameter is correctly propagated
6294 *
6295 * PARAMETERS :
6296 * @arr : map between the two enums
6297 * @len : len of the map
6298 * @hal_name : name of the hal_parm to map
6299 *
6300 * RETURN : int type of status
6301 * fwk_name -- success
6302 * none-zero failure code
6303 *==========================================================================*/
6304template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6305 size_t len, halType hal_name)
6306{
6307
6308 for (size_t i = 0; i < len; i++) {
6309 if (arr[i].hal_name == hal_name) {
6310 return arr[i].fwk_name;
6311 }
6312 }
6313
6314 /* Not able to find matching framework type is not necessarily
6315 * an error case. This happens when mm-camera supports more attributes
6316 * than the frameworks do */
6317 LOGH("Cannot find matching framework type");
6318 return NAME_NOT_FOUND;
6319}
6320
6321/*===========================================================================
6322 * FUNCTION : lookupHalName
6323 *
6324 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6325 *              make sure the parameter is correctly propagated
6326 *
6327 * PARAMETERS :
6328 * @arr : map between the two enums
6329 * @len : len of the map
6330 *   @fwk_name : name of the fwk_parm to map
6331 *
6332 * RETURN : int32_t type of status
6333 * hal_name -- success
6334 *              non-zero failure code
6335 *==========================================================================*/
6336template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6337 size_t len, fwkType fwk_name)
6338{
6339 for (size_t i = 0; i < len; i++) {
6340 if (arr[i].fwk_name == fwk_name) {
6341 return arr[i].hal_name;
6342 }
6343 }
6344
6345 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6346 return NAME_NOT_FOUND;
6347}
6348
6349/*===========================================================================
6350 * FUNCTION : lookupProp
6351 *
6352 * DESCRIPTION: lookup a value by its name
6353 *
6354 * PARAMETERS :
6355 * @arr : map between the two enums
6356 * @len : size of the map
6357 * @name : name to be looked up
6358 *
6359 * RETURN : Value if found
6360 * CAM_CDS_MODE_MAX if not found
6361 *==========================================================================*/
6362template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6363 size_t len, const char *name)
6364{
6365 if (name) {
6366 for (size_t i = 0; i < len; i++) {
6367 if (!strcmp(arr[i].desc, name)) {
6368 return arr[i].val;
6369 }
6370 }
6371 }
6372 return CAM_CDS_MODE_MAX;
6373}
6374
6375/*===========================================================================
6376 * FUNCTION   : translateFromHalMetadata
6377 * DESCRIPTION: Translate metadata received from the backend into the
6378 *              camera_metadata_t format expected by the framework
6379 * PARAMETERS :
6380 * @metadata : metadata information from callback
6381 * @timestamp: metadata buffer timestamp
6382 * @request_id: request id
6383 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006384 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006385 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6386 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006387 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006388 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6389 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006390 *
6391 * RETURN : camera_metadata_t*
6392 * metadata in a format specified by fwk
6393 *==========================================================================*/
6394camera_metadata_t*
6395QCamera3HardwareInterface::translateFromHalMetadata(
6396 metadata_buffer_t *metadata,
6397 nsecs_t timestamp,
6398 int32_t request_id,
6399 const CameraMetadata& jpegMetadata,
6400 uint8_t pipeline_depth,
6401 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006402 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006403 /* DevCamDebug metadata translateFromHalMetadata argument */
6404 uint8_t DevCamDebug_meta_enable,
6405 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006406 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006407 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006408 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006409{
6410 CameraMetadata camMetadata;
6411 camera_metadata_t *resultMetadata;
6412
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006413 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006414 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6415 * Timestamp is needed because it's used for shutter notify calculation.
6416         */
6417 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6418 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006419 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006420 }
6421
Thierry Strudel3d639192016-09-09 11:52:26 -07006422 if (jpegMetadata.entryCount())
6423 camMetadata.append(jpegMetadata);
6424
6425 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6426 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6427 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6428 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006429 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006430 if (mBatchSize == 0) {
6431 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6432 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6433 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006434
Samuel Ha68ba5172016-12-15 18:41:12 -08006435 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6436    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6437 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
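        // Each IF_META_AVAILABLE block below copies one vendor debug value from
        // the backend metadata into the framework result, only when the backend
        // populated that entry.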
6438 // DevCamDebug metadata translateFromHalMetadata AF
6439 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6440 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6441 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6442 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6443 }
6444 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6445 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6446 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6447 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6448 }
6449 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6450 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6451 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6452 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6453 }
6454 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6455 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6456 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6457 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6458 }
6459 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6460 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6461 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6462 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6463 }
6464 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6465 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6466 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6467 *DevCamDebug_af_monitor_pdaf_target_pos;
6468 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6469 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6470 }
6471 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6472 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6473 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6474 *DevCamDebug_af_monitor_pdaf_confidence;
6475 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6476 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6477 }
6478 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6479 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6480 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6481 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6482 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6483 }
6484 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6485 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6486 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6487 *DevCamDebug_af_monitor_tof_target_pos;
6488 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6489 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6490 }
6491 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6492 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6493 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6494 *DevCamDebug_af_monitor_tof_confidence;
6495 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6496 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6497 }
6498 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6499 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6500 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6501 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6502 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6503 }
6504 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6505 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6506 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6507 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6508 &fwk_DevCamDebug_af_monitor_type_select, 1);
6509 }
6510 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6511 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6512 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6513 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6514 &fwk_DevCamDebug_af_monitor_refocus, 1);
6515 }
6516 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6517 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6518 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6519 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6520 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6521 }
6522 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6523 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6524 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6525 *DevCamDebug_af_search_pdaf_target_pos;
6526 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6527 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6528 }
6529 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6530 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6531 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6532 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6533 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6534 }
6535 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6536 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6537 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6538 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6539 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6540 }
6541 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6542 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6543 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6544 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6545 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6546 }
6547 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6548 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6549 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6550 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6551 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6552 }
6553 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6554 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6555 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6556 *DevCamDebug_af_search_tof_target_pos;
6557 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6558 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6559 }
6560 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6561 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6562 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6563 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6564 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6565 }
6566 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6567 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6568 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6569 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6570 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6571 }
6572 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6573 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6574 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6575 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6576 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6577 }
6578 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6579 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6580 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6581 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6582 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6583 }
6584 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6585 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6586 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6587 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6588 &fwk_DevCamDebug_af_search_type_select, 1);
6589 }
6590 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6591 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6592 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6593 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6594 &fwk_DevCamDebug_af_search_next_pos, 1);
6595 }
6596 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6597 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6598 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6599 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6600 &fwk_DevCamDebug_af_search_target_pos, 1);
6601 }
6602 // DevCamDebug metadata translateFromHalMetadata AEC
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6604 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6605 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6606 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6607 }
6608 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6609 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6610 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6611 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6612 }
6613 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6614 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6615 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6616 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6617 }
6618 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6619 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6620 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6621 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6622 }
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6624 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6625 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6626 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6627 }
6628 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6629 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6630 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6631 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6634 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6635 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6636 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6637 }
6638 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6639 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6640 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6641 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6642 }
Samuel Ha34229982017-02-17 13:51:11 -08006643 // DevCamDebug metadata translateFromHalMetadata zzHDR
6644 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6645 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6646 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6647 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6650 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6651 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6652 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6653 }
6654 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6655 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6656 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6657 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6660 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6661 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6662 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6663 }
6664 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6665 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6666 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6667 *DevCamDebug_aec_hdr_sensitivity_ratio;
6668 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6669 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6670 }
6671 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6672 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6673 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6674 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6675 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6676 }
6677 // DevCamDebug metadata translateFromHalMetadata ADRC
6678 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6679 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6680 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6681 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6682 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6683 }
6684 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6685 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6686 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6687 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6688 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6689 }
6690 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6691 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6692 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6693 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6694 }
6695 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6696 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6697 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6698 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6699 }
6700 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6701 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6702 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6703 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6704 }
6705 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6706 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6707 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6708 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6709 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006710 // DevCamDebug metadata translateFromHalMetadata AWB
6711 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6712 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6713 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6714 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6715 }
6716 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6717 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6718 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6719 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6720 }
6721 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6722 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6723 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6724 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6727 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6728 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6729 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6732 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6733 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6734 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6735 }
6736 }
6737 // atrace_end(ATRACE_TAG_ALWAYS);
6738
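    // Frame number: widen the HAL's 32-bit frame number to the int64 expected
    // by ANDROID_SYNC_FRAME_NUMBER.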
Thierry Strudel3d639192016-09-09 11:52:26 -07006739 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6740 int64_t fwk_frame_number = *frame_number;
6741 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6742 }
6743
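    // AE target FPS range: truncate the HAL's float min/max FPS to the
    // framework's int32 [min, max] pair.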
6744 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6745 int32_t fps_range[2];
6746 fps_range[0] = (int32_t)float_range->min_fps;
6747 fps_range[1] = (int32_t)float_range->max_fps;
6748 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6749 fps_range, 2);
6750 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6751 fps_range[0], fps_range[1]);
6752 }
6753
6754 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6755 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6756 }
6757
6758 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6759 int val = lookupFwkName(SCENE_MODES_MAP,
6760 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6761 *sceneMode);
6762 if (NAME_NOT_FOUND != val) {
6763 uint8_t fwkSceneMode = (uint8_t)val;
6764 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6765 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6766 fwkSceneMode);
6767 }
6768 }
6769
6770 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6771 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6772 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6773 }
6774
6775 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6776 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6777 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6778 }
6779
6780 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6781 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6782 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6783 }
6784
6785 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6786 CAM_INTF_META_EDGE_MODE, metadata) {
6787 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6788 }
6789
6790 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6791 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6792 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6793 }
6794
6795 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6796 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6797 }
6798
6799 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6800 if (0 <= *flashState) {
6801 uint8_t fwk_flashState = (uint8_t) *flashState;
6802 if (!gCamCapability[mCameraId]->flash_available) {
6803 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6804 }
6805 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6806 }
6807 }
6808
6809 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6810 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6811 if (NAME_NOT_FOUND != val) {
6812 uint8_t fwk_flashMode = (uint8_t)val;
6813 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6814 }
6815 }
6816
6817 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6818 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6819 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6820 }
6821
6822 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6823 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6824 }
6825
6826 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6827 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6828 }
6829
6830 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6831 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6832 }
6833
6834 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6835 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6836 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6837 }
6838
6839 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6840 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6841 LOGD("fwk_videoStab = %d", fwk_videoStab);
6842 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6843 } else {
6844 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be
6845 // non-NULL, so hard-code the video stabilization result to OFF mode.
6846 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6847 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006848 LOGD("EIS result defaults to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006849 }
6850
6851 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6852 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6853 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6854 }
6855
6856 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6857 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6858 }
6859
Thierry Strudel3d639192016-09-09 11:52:26 -07006860 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6861 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006862 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006863
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006864 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6865 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006866
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006867 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006868 blackLevelAppliedPattern->cam_black_level[0],
6869 blackLevelAppliedPattern->cam_black_level[1],
6870 blackLevelAppliedPattern->cam_black_level[2],
6871 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006872 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6873 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006874
6875#ifndef USE_HAL_3_3
6876 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006877 // Need to convert from the internal 12-bit depth to the sensor's 10-bit raw
6878 // depth space.
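    // (i.e. divide by 4, since 2^12 / 2^10 = 4).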
6879 fwk_blackLevelInd[0] /= 4.0;
6880 fwk_blackLevelInd[1] /= 4.0;
6881 fwk_blackLevelInd[2] /= 4.0;
6882 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006883 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6884 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006885#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006886 }
6887
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006888#ifndef USE_HAL_3_3
6889 // Fixed whitelevel is used by ISP/Sensor
6890 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6891 &gCamCapability[mCameraId]->white_level, 1);
6892#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006893
6894 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6895 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6896 int32_t scalerCropRegion[4];
6897 scalerCropRegion[0] = hScalerCropRegion->left;
6898 scalerCropRegion[1] = hScalerCropRegion->top;
6899 scalerCropRegion[2] = hScalerCropRegion->width;
6900 scalerCropRegion[3] = hScalerCropRegion->height;
6901
6902 // Adjust crop region from sensor output coordinate system to active
6903 // array coordinate system.
6904 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6905 scalerCropRegion[2], scalerCropRegion[3]);
6906
6907 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6908 }
6909
6910 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6911 LOGD("sensorExpTime = %lld", *sensorExpTime);
6912 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6913 }
6914
6915 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6916 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6917 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6918 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6919 }
6920
6921 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6922 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6923 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6924 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6925 sensorRollingShutterSkew, 1);
6926 }
6927
6928 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6929 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6930 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6931
6932 //calculate the noise profile based on sensitivity
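        // Per the Android camera metadata definition of ANDROID_SENSOR_NOISE_PROFILE,
        // each (S, O) pair approximates a channel's noise variance as S * x + O for a
        // pixel value x; S and O are derived here from the analog sensitivity.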
6933 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6934 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6935 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6936 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6937 noise_profile[i] = noise_profile_S;
6938 noise_profile[i+1] = noise_profile_O;
6939 }
6940 LOGD("noise model entry (S, O) is (%f, %f)",
6941 noise_profile_S, noise_profile_O);
6942 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6943 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6944 }
6945
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006946#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006947 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006948 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006949 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006950 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006951 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6952 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6953 }
6954 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006955#endif
6956
Thierry Strudel3d639192016-09-09 11:52:26 -07006957 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6958 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6959 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6960 }
6961
6962 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6963 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6964 *faceDetectMode);
6965 if (NAME_NOT_FOUND != val) {
6966 uint8_t fwk_faceDetectMode = (uint8_t)val;
6967 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6968
6969 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6970 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6971 CAM_INTF_META_FACE_DETECTION, metadata) {
6972 uint8_t numFaces = MIN(
6973 faceDetectionInfo->num_faces_detected, MAX_ROI);
6974 int32_t faceIds[MAX_ROI];
6975 uint8_t faceScores[MAX_ROI];
6976 int32_t faceRectangles[MAX_ROI * 4];
6977 int32_t faceLandmarks[MAX_ROI * 6];
6978 size_t j = 0, k = 0;
6979
6980 for (size_t i = 0; i < numFaces; i++) {
6981 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6982 // Adjust the face boundary from the sensor output coordinate system
6983 // to the active array coordinate system.
6984 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6985 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6986 rect.width, rect.height);
6987
6988 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6989 faceRectangles+j, -1);
6990
6991 j+= 4;
6992 }
6993 if (numFaces <= 0) {
6994 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6995 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6996 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6997 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6998 }
6999
7000 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7001 numFaces);
7002 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7003 faceRectangles, numFaces * 4U);
7004 if (fwk_faceDetectMode ==
7005 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7006 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7007 CAM_INTF_META_FACE_LANDMARK, metadata) {
7008
7009 for (size_t i = 0; i < numFaces; i++) {
7010 // Map the landmark coordinates from the sensor output coordinate
7011 // system to the active array coordinate system.
7012 mCropRegionMapper.toActiveArray(
7013 landmarks->face_landmarks[i].left_eye_center.x,
7014 landmarks->face_landmarks[i].left_eye_center.y);
7015 mCropRegionMapper.toActiveArray(
7016 landmarks->face_landmarks[i].right_eye_center.x,
7017 landmarks->face_landmarks[i].right_eye_center.y);
7018 mCropRegionMapper.toActiveArray(
7019 landmarks->face_landmarks[i].mouth_center.x,
7020 landmarks->face_landmarks[i].mouth_center.y);
7021
7022 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007023 k+= TOTAL_LANDMARK_INDICES;
7024 }
7025 } else {
7026 for (size_t i = 0; i < numFaces; i++) {
7027 setInvalidLandmarks(faceLandmarks+k);
7028 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007029 }
7030 }
7031
7032 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7033 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7034 faceLandmarks, numFaces * 6U);
7035 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007036 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7037 CAM_INTF_META_FACE_BLINK, metadata) {
7038 uint8_t detected[MAX_ROI];
7039 uint8_t degree[MAX_ROI * 2];
7040 for (size_t i = 0; i < numFaces; i++) {
7041 detected[i] = blinks->blink[i].blink_detected;
7042 degree[2 * i] = blinks->blink[i].left_blink;
7043 degree[2 * i + 1] = blinks->blink[i].right_blink;
7044 }
7045 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7046 detected, numFaces);
7047 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7048 degree, numFaces * 2);
7049 }
7050 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7051 CAM_INTF_META_FACE_SMILE, metadata) {
7052 uint8_t degree[MAX_ROI];
7053 uint8_t confidence[MAX_ROI];
7054 for (size_t i = 0; i < numFaces; i++) {
7055 degree[i] = smiles->smile[i].smile_degree;
7056 confidence[i] = smiles->smile[i].smile_confidence;
7057 }
7058 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7059 degree, numFaces);
7060 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7061 confidence, numFaces);
7062 }
7063 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7064 CAM_INTF_META_FACE_GAZE, metadata) {
7065 int8_t angle[MAX_ROI];
7066 int32_t direction[MAX_ROI * 3];
7067 int8_t degree[MAX_ROI * 2];
7068 for (size_t i = 0; i < numFaces; i++) {
7069 angle[i] = gazes->gaze[i].gaze_angle;
7070 direction[3 * i] = gazes->gaze[i].updown_dir;
7071 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7072 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7073 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7074 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7075 }
7076 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7077 (uint8_t *)angle, numFaces);
7078 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7079 direction, numFaces * 3);
7080 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7081 (uint8_t *)degree, numFaces * 2);
7082 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007083 }
7084 }
7085 }
7086 }
7087
7088 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7089 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007090 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007091 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007092 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007093
Shuzhen Wang14415f52016-11-16 18:26:18 -08007094 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7095 histogramBins = *histBins;
7096 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7097 }
7098
7099 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007100 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7101 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007102 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007103
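                    // Select which histogram buffer to report: for Bayer stats use the
                    // channel indicated by data_type (GR/GB/B), falling back to the R
                    // channel buffer for Y/ALL/R and unknown types; YUV stats carry a
                    // single buffer.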
7104 switch (stats_data->type) {
7105 case CAM_HISTOGRAM_TYPE_BAYER:
7106 switch (stats_data->bayer_stats.data_type) {
7107 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007108 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7109 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007110 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007111 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7112 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007113 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007114 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7115 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007116 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007117 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007118 case CAM_STATS_CHANNEL_R:
7119 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007120 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7121 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007122 }
7123 break;
7124 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007125 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007126 break;
7127 }
7128
Shuzhen Wang14415f52016-11-16 18:26:18 -08007129 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130 }
7131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007132 }
7133
7134 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7135 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7136 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7137 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7138 }
7139
7140 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7141 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7142 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7143 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7144 }
7145
7146 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7147 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7148 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7149 CAM_MAX_SHADING_MAP_HEIGHT);
7150 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7151 CAM_MAX_SHADING_MAP_WIDTH);
7152 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7153 lensShadingMap->lens_shading, 4U * map_width * map_height);
7154 }
7155
7156 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7157 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7158 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7159 }
7160
7161 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7162 //Populate CAM_INTF_META_TONEMAP_CURVES
7163 /* ch0 = G, ch1 = B, ch2 = R */
7164 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7165 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7166 tonemap->tonemap_points_cnt,
7167 CAM_MAX_TONEMAP_CURVE_SIZE);
7168 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7169 }
7170
7171 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7172 &tonemap->curves[0].tonemap_points[0][0],
7173 tonemap->tonemap_points_cnt * 2);
7174
7175 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7176 &tonemap->curves[1].tonemap_points[0][0],
7177 tonemap->tonemap_points_cnt * 2);
7178
7179 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7180 &tonemap->curves[2].tonemap_points[0][0],
7181 tonemap->tonemap_points_cnt * 2);
7182 }
7183
7184 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7185 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7186 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7187 CC_GAIN_MAX);
7188 }
7189
7190 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7191 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7192 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7193 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7194 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7195 }
7196
7197 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7198 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7199 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7200 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7201 toneCurve->tonemap_points_cnt,
7202 CAM_MAX_TONEMAP_CURVE_SIZE);
7203 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7204 }
7205 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7206 (float*)toneCurve->curve.tonemap_points,
7207 toneCurve->tonemap_points_cnt * 2);
7208 }
7209
7210 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7211 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7212 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7213 predColorCorrectionGains->gains, 4);
7214 }
7215
7216 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7217 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7218 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7219 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7220 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7221 }
7222
7223 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7224 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7225 }
7226
7227 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7228 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7229 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7230 }
7231
7232 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7233 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7234 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7235 }
7236
7237 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7238 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7239 *effectMode);
7240 if (NAME_NOT_FOUND != val) {
7241 uint8_t fwk_effectMode = (uint8_t)val;
7242 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7243 }
7244 }
7245
7246 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7247 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7248 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7249 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7250 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7251 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7252 }
7253 int32_t fwk_testPatternData[4];
7254 fwk_testPatternData[0] = testPatternData->r;
7255 fwk_testPatternData[3] = testPatternData->b;
7256 switch (gCamCapability[mCameraId]->color_arrangement) {
7257 case CAM_FILTER_ARRANGEMENT_RGGB:
7258 case CAM_FILTER_ARRANGEMENT_GRBG:
7259 fwk_testPatternData[1] = testPatternData->gr;
7260 fwk_testPatternData[2] = testPatternData->gb;
7261 break;
7262 case CAM_FILTER_ARRANGEMENT_GBRG:
7263 case CAM_FILTER_ARRANGEMENT_BGGR:
7264 fwk_testPatternData[2] = testPatternData->gr;
7265 fwk_testPatternData[1] = testPatternData->gb;
7266 break;
7267 default:
7268 LOGE("color arrangement %d is not supported",
7269 gCamCapability[mCameraId]->color_arrangement);
7270 break;
7271 }
7272 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7273 }
7274
7275 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7276 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7277 }
7278
7279 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7280 String8 str((const char *)gps_methods);
7281 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7282 }
7283
7284 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7285 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7286 }
7287
7288 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7289 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7290 }
7291
7292 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7293 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7294 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7295 }
7296
7297 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7298 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7299 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7300 }
7301
7302 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7303 int32_t fwk_thumb_size[2];
7304 fwk_thumb_size[0] = thumb_size->width;
7305 fwk_thumb_size[1] = thumb_size->height;
7306 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7307 }
7308
7309 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7310 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7311 privateData,
7312 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7313 }
7314
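    // Exposure metering: the HAL AEC algorithm type is reported as-is through
    // the QCAMERA3_EXPOSURE_METER vendor tag.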
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007315 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007316 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007317 meteringMode, 1);
7318 }
7319
Thierry Strudel54dc9782017-02-15 12:12:10 -08007320 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7321 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7322 LOGD("hdr_scene_data: %d %f\n",
7323 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7324 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7325 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7326 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7327 &isHdr, 1);
7328 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7329 &isHdrConfidence, 1);
7330 }
7331
7332
7333
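    // Serialize the tuning parameters into a flat blob for the
    // QCAMERA3_TUNING_META_DATA_BLOB vendor tag: six 32-bit header fields
    // (data version plus the sensor/VFE/CPP/CAC/mod3 section sizes, with the
    // mod3 size forced to zero) followed by the clamped data sections.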
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 if (metadata->is_tuning_params_valid) {
7335 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7336 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7337 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7338
7339
7340 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7341 sizeof(uint32_t));
7342 data += sizeof(uint32_t);
7343
7344 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7345 sizeof(uint32_t));
7346 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7347 data += sizeof(uint32_t);
7348
7349 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7350 sizeof(uint32_t));
7351 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7352 data += sizeof(uint32_t);
7353
7354 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7355 sizeof(uint32_t));
7356 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7357 data += sizeof(uint32_t);
7358
7359 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7360 sizeof(uint32_t));
7361 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7362 data += sizeof(uint32_t);
7363
7364 metadata->tuning_params.tuning_mod3_data_size = 0;
7365 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7366 sizeof(uint32_t));
7367 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7368 data += sizeof(uint32_t);
7369
7370 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7371 TUNING_SENSOR_DATA_MAX);
7372 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7373 count);
7374 data += count;
7375
7376 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7377 TUNING_VFE_DATA_MAX);
7378 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7379 count);
7380 data += count;
7381
7382 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7383 TUNING_CPP_DATA_MAX);
7384 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7385 count);
7386 data += count;
7387
7388 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7389 TUNING_CAC_DATA_MAX);
7390 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7391 count);
7392 data += count;
7393
7394 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7395 (int32_t *)(void *)tuning_meta_data_blob,
7396 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7397 }
7398
7399 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7400 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7401 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7402 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7403 NEUTRAL_COL_POINTS);
7404 }
7405
7406 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7407 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7408 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7409 }
7410
7411 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7412 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7413 // Adjust crop region from sensor output coordinate system to active
7414 // array coordinate system.
7415 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7416 hAeRegions->rect.width, hAeRegions->rect.height);
7417
7418 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7419 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7420 REGIONS_TUPLE_COUNT);
7421 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7422 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7423 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7424 hAeRegions->rect.height);
7425 }
7426
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007427 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7428 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7429 if (NAME_NOT_FOUND != val) {
7430 uint8_t fwkAfMode = (uint8_t)val;
7431 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7432 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7433 } else {
7434 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7435 val);
7436 }
7437 }
7438
Thierry Strudel3d639192016-09-09 11:52:26 -07007439 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7440 uint8_t fwk_afState = (uint8_t) *afState;
7441 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007442 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007443 }
7444
7445 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7446 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7447 }
7448
7449 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7450 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7451 }
7452
7453 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7454 uint8_t fwk_lensState = *lensState;
7455 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7456 }
7457
7458 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7459 /*af regions*/
7460 int32_t afRegions[REGIONS_TUPLE_COUNT];
7461 // Adjust crop region from sensor output coordinate system to active
7462 // array coordinate system.
7463 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7464 hAfRegions->rect.width, hAfRegions->rect.height);
7465
7466 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7467 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7468 REGIONS_TUPLE_COUNT);
7469 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7470 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7471 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7472 hAfRegions->rect.height);
7473 }
7474
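    // Fold the 50 Hz / 60 Hz auto-detect antibanding sub-modes back into plain
    // AUTO before mapping to the framework enum.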
7475 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007476 uint32_t ab_mode = *hal_ab_mode;
7477 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7478 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7479 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7480 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007481 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007482 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007483 if (NAME_NOT_FOUND != val) {
7484 uint8_t fwk_ab_mode = (uint8_t)val;
7485 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7486 }
7487 }
7488
7489 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7490 int val = lookupFwkName(SCENE_MODES_MAP,
7491 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7492 if (NAME_NOT_FOUND != val) {
7493 uint8_t fwkBestshotMode = (uint8_t)val;
7494 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7495 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7496 } else {
7497 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7498 }
7499 }
7500
7501 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7502 uint8_t fwk_mode = (uint8_t) *mode;
7503 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7504 }
7505
7506 /* Constant metadata values to be updated */
7507 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7508 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7509
7510 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7511 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7512
7513 int32_t hotPixelMap[2];
7514 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7515
7516 // CDS
7517 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7518 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7519 }
7520
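    // Video HDR: report the sensor HDR state through the QCAMERA3_VIDEO_HDR_MODE
    // vendor tag and keep mCurrFeatureState in sync so HDR toggles can be logged.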
Thierry Strudel04e026f2016-10-10 11:27:36 -07007521 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7522 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007523 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007524 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7525 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7526 } else {
7527 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7528 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007529
7530 if(fwk_hdr != curr_hdr_state) {
7531 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7532 if(fwk_hdr)
7533 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7534 else
7535 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7536 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007537 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7538 }
7539
Thierry Strudel54dc9782017-02-15 12:12:10 -08007540 //binning correction
7541 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7542 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7543 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7544 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7545 }
7546
Thierry Strudel04e026f2016-10-10 11:27:36 -07007547 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007548 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007549 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7550 int8_t is_ir_on = 0;
7551
7552 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7553 if(is_ir_on != curr_ir_state) {
7554 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7555 if(is_ir_on)
7556 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7557 else
7558 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7559 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007560 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007561 }
7562
Thierry Strudel269c81a2016-10-12 12:13:59 -07007563 // AEC SPEED
7564 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7565 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7566 }
7567
7568 // AWB SPEED
7569 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7570 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7571 }
7572
Thierry Strudel3d639192016-09-09 11:52:26 -07007573 // TNR
7574 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7575 uint8_t tnr_enable = tnr->denoise_enable;
7576 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007577 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7578 int8_t is_tnr_on = 0;
7579
7580 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7581 if(is_tnr_on != curr_tnr_state) {
7582 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7583 if(is_tnr_on)
7584 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7585 else
7586 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007588
7589 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7590 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7591 }
7592
7593 // Reprocess crop data
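    // Publish the crop rectangle (and ROI map) of the reprocessible output stream
    // through the QCAMERA3_CROP_* vendor tags; when internal reprocessing already
    // ran (pprocDone), the full input stream dimensions are reported instead.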
7594 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7595 uint8_t cnt = crop_data->num_of_streams;
7596 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7597 // mm-qcamera-daemon only posts crop_data for streams
7598 // not linked to pproc, so the absence of valid crop metadata is not
7599 // necessarily an error case.
7600 LOGD("No valid crop metadata entries");
7601 } else {
7602 uint32_t reproc_stream_id;
7603 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7604 LOGD("No reprocessible stream found, ignore crop data");
7605 } else {
7606 int rc = NO_ERROR;
7607 Vector<int32_t> roi_map;
7608 int32_t *crop = new int32_t[cnt*4];
7609 if (NULL == crop) {
7610 rc = NO_MEMORY;
7611 }
7612 if (NO_ERROR == rc) {
7613 int32_t streams_found = 0;
7614 for (size_t i = 0; i < cnt; i++) {
7615 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7616 if (pprocDone) {
7617 // HAL already does internal reprocessing,
7618 // either via reprocessing before JPEG encoding,
7619 // or offline postprocessing for pproc bypass case.
7620 crop[0] = 0;
7621 crop[1] = 0;
7622 crop[2] = mInputStreamInfo.dim.width;
7623 crop[3] = mInputStreamInfo.dim.height;
7624 } else {
7625 crop[0] = crop_data->crop_info[i].crop.left;
7626 crop[1] = crop_data->crop_info[i].crop.top;
7627 crop[2] = crop_data->crop_info[i].crop.width;
7628 crop[3] = crop_data->crop_info[i].crop.height;
7629 }
7630 roi_map.add(crop_data->crop_info[i].roi_map.left);
7631 roi_map.add(crop_data->crop_info[i].roi_map.top);
7632 roi_map.add(crop_data->crop_info[i].roi_map.width);
7633 roi_map.add(crop_data->crop_info[i].roi_map.height);
7634 streams_found++;
7635 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7636 crop[0], crop[1], crop[2], crop[3]);
7637 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7638 crop_data->crop_info[i].roi_map.left,
7639 crop_data->crop_info[i].roi_map.top,
7640 crop_data->crop_info[i].roi_map.width,
7641 crop_data->crop_info[i].roi_map.height);
7642 break;
7643
7644 }
7645 }
7646 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7647 &streams_found, 1);
7648 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7649 crop, (size_t)(streams_found * 4));
7650 if (roi_map.array()) {
7651 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7652 roi_map.array(), roi_map.size());
7653 }
7654 }
7655 if (crop) {
7656 delete [] crop;
7657 }
7658 }
7659 }
7660 }
7661
7662 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7663 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7664 // so hard-code the CAC result to OFF mode.
7665 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7666 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7667 } else {
7668 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7669 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7670 *cacMode);
7671 if (NAME_NOT_FOUND != val) {
7672 uint8_t resultCacMode = (uint8_t)val;
7673 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7674 // If not, report the CAC mode that came in the corresponding request.
7675 if (fwk_cacMode != resultCacMode) {
7676 resultCacMode = fwk_cacMode;
7677 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007678 //Check if CAC is disabled by property
7679 if (m_cacModeDisabled) {
7680 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7681 }
7682
Thierry Strudel3d639192016-09-09 11:52:26 -07007683 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7684 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7685 } else {
7686 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7687 }
7688 }
7689 }
7690
7691 // Post blob of cam_cds_data through vendor tag.
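    // Only the CDS entry that matches the reprocessible stream is copied into the
    // override blob, which always advertises exactly one stream.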
7692 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7693 uint8_t cnt = cdsInfo->num_of_streams;
7694 cam_cds_data_t cdsDataOverride;
7695 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7696 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7697 cdsDataOverride.num_of_streams = 1;
7698 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7699 uint32_t reproc_stream_id;
7700 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7701 LOGD("No reprocessible stream found, ignore cds data");
7702 } else {
7703 for (size_t i = 0; i < cnt; i++) {
7704 if (cdsInfo->cds_info[i].stream_id ==
7705 reproc_stream_id) {
7706 cdsDataOverride.cds_info[0].cds_enable =
7707 cdsInfo->cds_info[i].cds_enable;
7708 break;
7709 }
7710 }
7711 }
7712 } else {
7713 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7714 }
7715 camMetadata.update(QCAMERA3_CDS_INFO,
7716 (uint8_t *)&cdsDataOverride,
7717 sizeof(cam_cds_data_t));
7718 }
7719
7720 // Ldaf calibration data
7721 if (!mLdafCalibExist) {
7722 IF_META_AVAILABLE(uint32_t, ldafCalib,
7723 CAM_INTF_META_LDAF_EXIF, metadata) {
7724 mLdafCalibExist = true;
7725 mLdafCalib[0] = ldafCalib[0];
7726 mLdafCalib[1] = ldafCalib[1];
7727 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7728 ldafCalib[0], ldafCalib[1]);
7729 }
7730 }
7731
Thierry Strudel54dc9782017-02-15 12:12:10 -08007732 // EXIF debug data through vendor tag
7733 /*
7734 * Mobicat Mask can assume 3 values:
7735 * 1 refers to Mobicat data,
7736 * 2 refers to Stats Debug and Exif Debug Data
7737 * 3 refers to Mobicat and Stats Debug Data
7738 * We want to make sure that we are sending Exif debug data
7739 * only when Mobicat Mask is 2.
7740 */
7741 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7742 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7743 (uint8_t *)(void *)mExifParams.debug_params,
7744 sizeof(mm_jpeg_debug_exif_params_t));
7745 }
7746
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007747 // Reprocess and DDM debug data through vendor tag
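    // Gather the per-module snapshot crop info, focal-length ratio, pipeline flip,
    // rotation, AF ROI and dynamic feature mask into a single cam_reprocess_info_t
    // blob for the reprocess/DDM debug vendor tag.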
7748 cam_reprocess_info_t repro_info;
7749 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007750 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7751 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007752 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 }
7754 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7755 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007756 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007757 }
7758 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7759 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007760 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007761 }
7762 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7763 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007764 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007765 }
7766 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7767 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007768 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007769 }
7770 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007771 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007772 }
7773 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7774 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007775 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007776 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007777 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7778 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7779 }
7780 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7781 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7782 }
7783 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7784 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007785
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007786 // INSTANT AEC MODE
7787 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7788 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7789 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7790 }
7791
Shuzhen Wange763e802016-03-31 10:24:29 -07007792 // AF scene change
7793 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7794 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7795 }
7796
Thierry Strudel3d639192016-09-09 11:52:26 -07007797 resultMetadata = camMetadata.release();
7798 return resultMetadata;
7799}
7800
7801/*===========================================================================
7802 * FUNCTION : saveExifParams
7803 *
7804 * DESCRIPTION: Save the EXIF debug parameters from the metadata callback into mExifParams
7805 *
7806 * PARAMETERS :
7807 * @metadata : metadata information from callback
7808 *
7809 * RETURN : none
7810 *
7811 *==========================================================================*/
7812void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7813{
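    // Copy each EXIF debug blob present in this metadata buffer (AE, AWB, AF, ASD,
    // stats, BE stats, bayer histogram, 3A tuning) into mExifParams and mark it valid.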
7814 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7815 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7816 if (mExifParams.debug_params) {
7817 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7818 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7819 }
7820 }
7821 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7822 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7823 if (mExifParams.debug_params) {
7824 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7825 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7826 }
7827 }
7828 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7829 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7830 if (mExifParams.debug_params) {
7831 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7832 mExifParams.debug_params->af_debug_params_valid = TRUE;
7833 }
7834 }
7835 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7836 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7837 if (mExifParams.debug_params) {
7838 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7839 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7840 }
7841 }
7842 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7843 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7844 if (mExifParams.debug_params) {
7845 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7846 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7847 }
7848 }
7849 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7850 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7851 if (mExifParams.debug_params) {
7852 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7853 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7854 }
7855 }
7856 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7857 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7858 if (mExifParams.debug_params) {
7859 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7860 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7861 }
7862 }
7863 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7864 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7865 if (mExifParams.debug_params) {
7866 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7867 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7868 }
7869 }
7870}
7871
7872/*===========================================================================
7873 * FUNCTION : get3AExifParams
7874 *
7875 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
7876 *
7877 * PARAMETERS : none
7878 *
7879 *
7880 * RETURN : mm_jpeg_exif_params_t
7881 *
7882 *==========================================================================*/
7883mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7884{
7885 return mExifParams;
7886}
7887
7888/*===========================================================================
7889 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7890 *
7891 * DESCRIPTION: Translate urgent metadata from the HAL callback into framework result metadata
7892 *
7893 * PARAMETERS :
7894 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007895 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7896 * urgent metadata in a batch. Always true for
7897 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007898 *
7899 * RETURN : camera_metadata_t*
7900 * metadata in a format specified by fwk
7901 *==========================================================================*/
7902camera_metadata_t*
7903QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007904 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007905{
7906 CameraMetadata camMetadata;
7907 camera_metadata_t *resultMetadata;
7908
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007909 if (!lastUrgentMetadataInBatch) {
7910 /* In batch mode, use empty metadata if this is not the last in batch
7911 */
7912 resultMetadata = allocate_camera_metadata(0, 0);
7913 return resultMetadata;
7914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007915
7916 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7917 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7918 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7919 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7920 }
7921
7922 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7923 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7924 &aecTrigger->trigger, 1);
7925 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7926 &aecTrigger->trigger_id, 1);
7927 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7928 aecTrigger->trigger);
7929 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7930 aecTrigger->trigger_id);
7931 }
7932
7933 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7934 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7935 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7936 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7937 }
7938
Thierry Strudel3d639192016-09-09 11:52:26 -07007939 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7940 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7941 &af_trigger->trigger, 1);
7942 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7943 af_trigger->trigger);
7944 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7945 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7946 af_trigger->trigger_id);
7947 }
7948
7949 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7950 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7951 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7952 if (NAME_NOT_FOUND != val) {
7953 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7954 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7955 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7956 } else {
7957 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7958 }
7959 }
7960
7961 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7962 uint32_t aeMode = CAM_AE_MODE_MAX;
7963 int32_t flashMode = CAM_FLASH_MODE_MAX;
7964 int32_t redeye = -1;
7965 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7966 aeMode = *pAeMode;
7967 }
7968 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7969 flashMode = *pFlashMode;
7970 }
7971 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7972 redeye = *pRedeye;
7973 }
7974
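    /*
     * Illustrative summary (derived from the checks below, not from a spec):
     * ANDROID_CONTROL_AE_MODE is deduced with the following precedence --
     * red-eye reduction enabled maps to ON_AUTO_FLASH_REDEYE, else an AUTO/ON
     * LED flash mode is mapped through AE_FLASH_MODE_MAP, else the plain AE
     * on/off state is used.
     */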
7975 if (1 == redeye) {
7976 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7977 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7978 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7979 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7980 flashMode);
7981 if (NAME_NOT_FOUND != val) {
7982 fwk_aeMode = (uint8_t)val;
7983 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7984 } else {
7985 LOGE("Unsupported flash mode %d", flashMode);
7986 }
7987 } else if (aeMode == CAM_AE_MODE_ON) {
7988 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7989 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7990 } else if (aeMode == CAM_AE_MODE_OFF) {
7991 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7992 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7993 } else {
7994 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7995 "flashMode:%d, aeMode:%u!!!",
7996 redeye, flashMode, aeMode);
7997 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007998 if (mInstantAEC) {
7999        // Increment frame index count until a bound is reached for instant AEC.
8000 mInstantAecFrameIdxCount++;
8001 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8002 CAM_INTF_META_AEC_INFO, metadata) {
8003 LOGH("ae_params->settled = %d",ae_params->settled);
8004            // If AEC has settled, or the number of frames has reached the bound value,
8005            // reset instant AEC.
8006 if (ae_params->settled ||
8007 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8008 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8009 mInstantAEC = false;
8010 mResetInstantAEC = true;
8011 mInstantAecFrameIdxCount = 0;
8012 }
8013 }
8014 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008015 resultMetadata = camMetadata.release();
8016 return resultMetadata;
8017}
8018
8019/*===========================================================================
8020 * FUNCTION : dumpMetadataToFile
8021 *
8022 * DESCRIPTION: Dumps tuning metadata to file system
8023 *
8024 * PARAMETERS :
8025 * @meta : tuning metadata
8026 * @dumpFrameCount : current dump frame count
8027 * @enabled : whether dumping is enabled
 * @type : dump type string used in the output file name
 * @frameNumber : current frame number
8028 *
8029 *==========================================================================*/
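/*
 * Sketch of the resulting dump file layout, as written by the code below
 * (field names follow tuning_params_t; the TUNING_*_DATA_OFFSET constants are
 * defined elsewhere in the HAL):
 *   uint32_t tuning_data_version
 *   uint32_t tuning_sensor_data_size
 *   uint32_t tuning_vfe_data_size
 *   uint32_t tuning_cpp_data_size
 *   uint32_t tuning_cac_data_size
 *   uint32_t tuning_mod3_data_size   (always written as 0)
 *   sensor, VFE, CPP and CAC payloads copied from meta.data at their
 *   respective TUNING_*_DATA_OFFSET offsets
 */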
8030void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8031 uint32_t &dumpFrameCount,
8032 bool enabled,
8033 const char *type,
8034 uint32_t frameNumber)
8035{
8036 //Some sanity checks
8037 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8038 LOGE("Tuning sensor data size bigger than expected %d: %d",
8039 meta.tuning_sensor_data_size,
8040 TUNING_SENSOR_DATA_MAX);
8041 return;
8042 }
8043
8044 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8045 LOGE("Tuning VFE data size bigger than expected %d: %d",
8046 meta.tuning_vfe_data_size,
8047 TUNING_VFE_DATA_MAX);
8048 return;
8049 }
8050
8051 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8052 LOGE("Tuning CPP data size bigger than expected %d: %d",
8053 meta.tuning_cpp_data_size,
8054 TUNING_CPP_DATA_MAX);
8055 return;
8056 }
8057
8058 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8059 LOGE("Tuning CAC data size bigger than expected %d: %d",
8060 meta.tuning_cac_data_size,
8061 TUNING_CAC_DATA_MAX);
8062 return;
8063 }
8064 //
8065
8066 if(enabled){
8067 char timeBuf[FILENAME_MAX];
8068 char buf[FILENAME_MAX];
8069 memset(buf, 0, sizeof(buf));
8070 memset(timeBuf, 0, sizeof(timeBuf));
8071 time_t current_time;
8072 struct tm * timeinfo;
8073 time (&current_time);
8074 timeinfo = localtime (&current_time);
8075 if (timeinfo != NULL) {
8076 strftime (timeBuf, sizeof(timeBuf),
8077 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8078 }
8079 String8 filePath(timeBuf);
8080 snprintf(buf,
8081 sizeof(buf),
8082 "%dm_%s_%d.bin",
8083 dumpFrameCount,
8084 type,
8085 frameNumber);
8086 filePath.append(buf);
8087 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8088 if (file_fd >= 0) {
8089 ssize_t written_len = 0;
8090 meta.tuning_data_version = TUNING_DATA_VERSION;
8091 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8092 written_len += write(file_fd, data, sizeof(uint32_t));
8093 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8094 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8095 written_len += write(file_fd, data, sizeof(uint32_t));
8096 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8097 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8098 written_len += write(file_fd, data, sizeof(uint32_t));
8099 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8100 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8101 written_len += write(file_fd, data, sizeof(uint32_t));
8102 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8103 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8104 written_len += write(file_fd, data, sizeof(uint32_t));
8105 meta.tuning_mod3_data_size = 0;
8106 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8107 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8108 written_len += write(file_fd, data, sizeof(uint32_t));
8109 size_t total_size = meta.tuning_sensor_data_size;
8110 data = (void *)((uint8_t *)&meta.data);
8111 written_len += write(file_fd, data, total_size);
8112 total_size = meta.tuning_vfe_data_size;
8113 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8114 written_len += write(file_fd, data, total_size);
8115 total_size = meta.tuning_cpp_data_size;
8116 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8117 written_len += write(file_fd, data, total_size);
8118 total_size = meta.tuning_cac_data_size;
8119 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8120 written_len += write(file_fd, data, total_size);
8121 close(file_fd);
8122        } else {
8123            LOGE("failed to open file for metadata dumping");
8124 }
8125 }
8126}
8127
8128/*===========================================================================
8129 * FUNCTION : cleanAndSortStreamInfo
8130 *
8131 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8132 * and sort them such that the raw stream is at the end of the list.
8133 * This is a workaround for a camera daemon constraint.
8134 *
8135 * PARAMETERS : None
8136 *
8137 *==========================================================================*/
8138void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8139{
8140 List<stream_info_t *> newStreamInfo;
8141
8142 /*clean up invalid streams*/
8143 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8144 it != mStreamInfo.end();) {
8145 if(((*it)->status) == INVALID){
8146 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8147 delete channel;
8148 free(*it);
8149 it = mStreamInfo.erase(it);
8150 } else {
8151 it++;
8152 }
8153 }
8154
8155 // Move preview/video/callback/snapshot streams into newList
8156 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8157 it != mStreamInfo.end();) {
8158 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8159 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8160 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8161 newStreamInfo.push_back(*it);
8162 it = mStreamInfo.erase(it);
8163 } else
8164 it++;
8165 }
8166 // Move raw streams into newList
8167 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8168 it != mStreamInfo.end();) {
8169 newStreamInfo.push_back(*it);
8170 it = mStreamInfo.erase(it);
8171 }
8172
8173 mStreamInfo = newStreamInfo;
8174}
8175
8176/*===========================================================================
8177 * FUNCTION : extractJpegMetadata
8178 *
8179 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8180 * JPEG metadata is cached in HAL, and return as part of capture
8181 * result when metadata is returned from camera daemon.
8182 *
8183 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8184 * @request: capture request
8185 *
8186 *==========================================================================*/
8187void QCamera3HardwareInterface::extractJpegMetadata(
8188 CameraMetadata& jpegMetadata,
8189 const camera3_capture_request_t *request)
8190{
8191 CameraMetadata frame_settings;
8192 frame_settings = request->settings;
8193
8194 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8195 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8196 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8197 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8198
8199 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8200 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8201 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8202 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8203
8204 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8205 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8206 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8207 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8208
8209 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8210 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8211 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8212 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8213
8214 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8215 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8216 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8217 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8218
8219 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8220 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8221 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8222 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8223
8224 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8225 int32_t thumbnail_size[2];
8226 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8227 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8228 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8229 int32_t orientation =
8230 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008231 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008232 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8233 int32_t temp;
8234 temp = thumbnail_size[0];
8235 thumbnail_size[0] = thumbnail_size[1];
8236 thumbnail_size[1] = temp;
8237 }
8238 }
8239 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8240 thumbnail_size,
8241 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8242 }
8243
8244}
8245
8246/*===========================================================================
8247 * FUNCTION : convertToRegions
8248 *
8249 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8250 *
8251 * PARAMETERS :
8252 * @rect : cam_rect_t struct to convert
8253 * @region : int32_t destination array
8254 * @weight : if we are converting from cam_area_t, weight is valid
8255 * else weight = -1
8256 *
8257 *==========================================================================*/
8258void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8259 int32_t *region, int weight)
8260{
8261 region[0] = rect.left;
8262 region[1] = rect.top;
8263 region[2] = rect.left + rect.width;
8264 region[3] = rect.top + rect.height;
8265 if (weight > -1) {
8266 region[4] = weight;
8267 }
8268}
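/*
 * Illustrative example (values made up): a cam_rect_t of
 * {left=100, top=200, width=300, height=400} with weight=1 becomes
 * region[] = {100, 200, 400, 600, 1}, i.e. {left, top, right, bottom, weight}.
 */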
8269
8270/*===========================================================================
8271 * FUNCTION : convertFromRegions
8272 *
8273 * DESCRIPTION: helper method to convert from array to cam_rect_t
8274 *
8275 * PARAMETERS :
8276 * @rect : cam_rect_t struct to convert
8277 * @region : int32_t destination array
8278 * @weight : if we are converting from cam_area_t, weight is valid
8279 * else weight = -1
8280 *
8281 *==========================================================================*/
8282void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008283 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008284{
Thierry Strudel3d639192016-09-09 11:52:26 -07008285 int32_t x_min = frame_settings.find(tag).data.i32[0];
8286 int32_t y_min = frame_settings.find(tag).data.i32[1];
8287 int32_t x_max = frame_settings.find(tag).data.i32[2];
8288 int32_t y_max = frame_settings.find(tag).data.i32[3];
8289 roi.weight = frame_settings.find(tag).data.i32[4];
8290 roi.rect.left = x_min;
8291 roi.rect.top = y_min;
8292 roi.rect.width = x_max - x_min;
8293 roi.rect.height = y_max - y_min;
8294}
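/*
 * Illustrative example (values made up): a framework region of
 * {x_min=100, y_min=200, x_max=400, y_max=600, weight=1} yields a cam_area_t
 * with rect = {left=100, top=200, width=300, height=400} and weight=1,
 * i.e. the inverse of convertToRegions() above.
 */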
8295
8296/*===========================================================================
8297 * FUNCTION : resetIfNeededROI
8298 *
8299 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8300 * crop region
8301 *
8302 * PARAMETERS :
8303 * @roi : cam_area_t struct to resize
8304 * @scalerCropRegion : cam_crop_region_t region to compare against
8305 *
8306 *
8307 *==========================================================================*/
8308bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8309 const cam_crop_region_t* scalerCropRegion)
8310{
8311 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8312 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8313 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8314 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8315
8316    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8317     * Without this check, the validation below (whether the ROI lies inside the
8318     * scaler crop region) would fail, the ROI would not be reset, and the
8319     * algorithm would continue to use a stale ROI window.
8320     */
8321 if (roi->weight == 0) {
8322 return true;
8323 }
8324
8325 if ((roi_x_max < scalerCropRegion->left) ||
8326        // right edge of roi window is left of scaler crop's left edge
8327        (roi_y_max < scalerCropRegion->top) ||
8328        // bottom edge of roi window is above scaler crop's top edge
8329        (roi->rect.left > crop_x_max) ||
8330        // left edge of roi window is beyond (to the right of) scaler crop's right edge
8331        (roi->rect.top > crop_y_max)){
8332        // top edge of roi window is below scaler crop's bottom edge
8333 return false;
8334 }
8335 if (roi->rect.left < scalerCropRegion->left) {
8336 roi->rect.left = scalerCropRegion->left;
8337 }
8338 if (roi->rect.top < scalerCropRegion->top) {
8339 roi->rect.top = scalerCropRegion->top;
8340 }
8341 if (roi_x_max > crop_x_max) {
8342 roi_x_max = crop_x_max;
8343 }
8344 if (roi_y_max > crop_y_max) {
8345 roi_y_max = crop_y_max;
8346 }
8347 roi->rect.width = roi_x_max - roi->rect.left;
8348 roi->rect.height = roi_y_max - roi->rect.top;
8349 return true;
8350}
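/*
 * Illustrative example (values made up): with a scaler crop region of
 * {left=100, top=100, width=1600, height=1200}, an ROI of
 * {left=0, top=0, width=2000, height=1500} overlaps the crop and is clamped
 * in place to {left=100, top=100, width=1600, height=1200}, returning true;
 * an ROI lying entirely outside the crop region returns false.
 */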
8351
8352/*===========================================================================
8353 * FUNCTION : convertLandmarks
8354 *
8355 * DESCRIPTION: helper method to extract the landmarks from face detection info
8356 *
8357 * PARAMETERS :
8358 * @landmark_data : input landmark data to be converted
8359 * @landmarks : int32_t destination array
8360 *
8361 *
8362 *==========================================================================*/
8363void QCamera3HardwareInterface::convertLandmarks(
8364 cam_face_landmarks_info_t landmark_data,
8365 int32_t *landmarks)
8366{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008367 if (landmark_data.is_left_eye_valid) {
8368 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8369 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8370 } else {
8371 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8372 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8373 }
8374
8375 if (landmark_data.is_right_eye_valid) {
8376 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8377 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8378 } else {
8379 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8380 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8381 }
8382
8383 if (landmark_data.is_mouth_valid) {
8384 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8385 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8386 } else {
8387 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8388 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8389 }
8390}
8391
8392/*===========================================================================
8393 * FUNCTION : setInvalidLandmarks
8394 *
8395 * DESCRIPTION: helper method to set invalid landmarks
8396 *
8397 * PARAMETERS :
8398 * @landmarks : int32_t destination array
8399 *
8400 *
8401 *==========================================================================*/
8402void QCamera3HardwareInterface::setInvalidLandmarks(
8403 int32_t *landmarks)
8404{
8405 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8406 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8407 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8408 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8409 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8410 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008411}
8412
8413#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008414
8415/*===========================================================================
8416 * FUNCTION : getCapabilities
8417 *
8418 * DESCRIPTION: query camera capability from back-end
8419 *
8420 * PARAMETERS :
8421 * @ops : mm-interface ops structure
8422 * @cam_handle : camera handle for which we need capability
8423 *
8424 * RETURN : ptr type of capability structure
8425 * capability for success
8426 * NULL for failure
8427 *==========================================================================*/
8428cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8429 uint32_t cam_handle)
8430{
8431 int rc = NO_ERROR;
8432 QCamera3HeapMemory *capabilityHeap = NULL;
8433 cam_capability_t *cap_ptr = NULL;
8434
8435 if (ops == NULL) {
8436 LOGE("Invalid arguments");
8437 return NULL;
8438 }
8439
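    /*
     * Summary of the flow below: allocate a one-block heap buffer, map it to
     * the backend as CAM_MAPPING_BUF_TYPE_CAPABILITY, let the backend fill it
     * via query_capability(), copy the result into a malloc'ed
     * cam_capability_t, then unmap the buffer and release the heap.
     */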
8440 capabilityHeap = new QCamera3HeapMemory(1);
8441 if (capabilityHeap == NULL) {
8442 LOGE("creation of capabilityHeap failed");
8443 return NULL;
8444 }
8445
8446 /* Allocate memory for capability buffer */
8447 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8448 if(rc != OK) {
8449        LOGE("No memory for capability");
8450 goto allocate_failed;
8451 }
8452
8453 /* Map memory for capability buffer */
8454 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8455
8456 rc = ops->map_buf(cam_handle,
8457 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8458 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8459 if(rc < 0) {
8460 LOGE("failed to map capability buffer");
8461 rc = FAILED_TRANSACTION;
8462 goto map_failed;
8463 }
8464
8465 /* Query Capability */
8466 rc = ops->query_capability(cam_handle);
8467 if(rc < 0) {
8468 LOGE("failed to query capability");
8469 rc = FAILED_TRANSACTION;
8470 goto query_failed;
8471 }
8472
8473 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8474 if (cap_ptr == NULL) {
8475 LOGE("out of memory");
8476 rc = NO_MEMORY;
8477 goto query_failed;
8478 }
8479
8480 memset(cap_ptr, 0, sizeof(cam_capability_t));
8481 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8482
8483 int index;
8484 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8485 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8486 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8487 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8488 }
8489
8490query_failed:
8491 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8492map_failed:
8493 capabilityHeap->deallocate();
8494allocate_failed:
8495 delete capabilityHeap;
8496
8497 if (rc != NO_ERROR) {
8498 return NULL;
8499 } else {
8500 return cap_ptr;
8501 }
8502}
8503
Thierry Strudel3d639192016-09-09 11:52:26 -07008504/*===========================================================================
8505 * FUNCTION : initCapabilities
8506 *
8507 * DESCRIPTION: initialize camera capabilities in static data struct
8508 *
8509 * PARAMETERS :
8510 * @cameraId : camera Id
8511 *
8512 * RETURN : int32_t type of status
8513 * NO_ERROR -- success
8514 * non-zero failure code
8515 *==========================================================================*/
8516int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8517{
8518 int rc = 0;
8519 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008520 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008521
8522 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8523 if (rc) {
8524 LOGE("camera_open failed. rc = %d", rc);
8525 goto open_failed;
8526 }
8527 if (!cameraHandle) {
8528 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8529 goto open_failed;
8530 }
8531
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008532 handle = get_main_camera_handle(cameraHandle->camera_handle);
8533 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8534 if (gCamCapability[cameraId] == NULL) {
8535 rc = FAILED_TRANSACTION;
8536 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008537 }
8538
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008539 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008540 if (is_dual_camera_by_idx(cameraId)) {
8541 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8542 gCamCapability[cameraId]->aux_cam_cap =
8543 getCapabilities(cameraHandle->ops, handle);
8544 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8545 rc = FAILED_TRANSACTION;
8546 free(gCamCapability[cameraId]);
8547 goto failed_op;
8548 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008549
8550 // Copy the main camera capability to main_cam_cap struct
8551 gCamCapability[cameraId]->main_cam_cap =
8552 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8553 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8554 LOGE("out of memory");
8555 rc = NO_MEMORY;
8556 goto failed_op;
8557 }
8558 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8559 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008560 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008561failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008562 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8563 cameraHandle = NULL;
8564open_failed:
8565 return rc;
8566}
8567
8568/*==========================================================================
8569 * FUNCTION : get3AVersion
8570 *
8571 * DESCRIPTION: get the Q3A S/W version
8572 *
8573 * PARAMETERS :
8574 * @sw_version: Reference of Q3A structure which will hold version info upon
8575 * return
8576 *
8577 * RETURN : None
8578 *
8579 *==========================================================================*/
8580void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8581{
8582 if(gCamCapability[mCameraId])
8583 sw_version = gCamCapability[mCameraId]->q3a_version;
8584 else
8585 LOGE("Capability structure NULL!");
8586}
8587
8588
8589/*===========================================================================
8590 * FUNCTION : initParameters
8591 *
8592 * DESCRIPTION: initialize camera parameters
8593 *
8594 * PARAMETERS :
8595 *
8596 * RETURN : int32_t type of status
8597 * NO_ERROR -- success
8598 * non-zero failure code
8599 *==========================================================================*/
8600int QCamera3HardwareInterface::initParameters()
8601{
8602 int rc = 0;
8603
8604 //Allocate Set Param Buffer
8605 mParamHeap = new QCamera3HeapMemory(1);
8606 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8607 if(rc != OK) {
8608 rc = NO_MEMORY;
8609 LOGE("Failed to allocate SETPARM Heap memory");
8610 delete mParamHeap;
8611 mParamHeap = NULL;
8612 return rc;
8613 }
8614
8615 //Map memory for parameters buffer
8616 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8617 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8618 mParamHeap->getFd(0),
8619 sizeof(metadata_buffer_t),
8620 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8621 if(rc < 0) {
8622 LOGE("failed to map SETPARM buffer");
8623 rc = FAILED_TRANSACTION;
8624 mParamHeap->deallocate();
8625 delete mParamHeap;
8626 mParamHeap = NULL;
8627 return rc;
8628 }
8629
8630 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8631
8632 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8633 return rc;
8634}
8635
8636/*===========================================================================
8637 * FUNCTION : deinitParameters
8638 *
8639 * DESCRIPTION: de-initialize camera parameters
8640 *
8641 * PARAMETERS :
8642 *
8643 * RETURN : NONE
8644 *==========================================================================*/
8645void QCamera3HardwareInterface::deinitParameters()
8646{
8647 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8648 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8649
8650 mParamHeap->deallocate();
8651 delete mParamHeap;
8652 mParamHeap = NULL;
8653
8654 mParameters = NULL;
8655
8656 free(mPrevParameters);
8657 mPrevParameters = NULL;
8658}
8659
8660/*===========================================================================
8661 * FUNCTION : calcMaxJpegSize
8662 *
8663 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8664 *
8665 * PARAMETERS :
8666 * @camera_id : camera Id
8667 * RETURN : max_jpeg_size
8668 *==========================================================================*/
8669size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8670{
8671 size_t max_jpeg_size = 0;
8672 size_t temp_width, temp_height;
8673 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8674 MAX_SIZES_CNT);
8675 for (size_t i = 0; i < count; i++) {
8676 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8677 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8678 if (temp_width * temp_height > max_jpeg_size ) {
8679 max_jpeg_size = temp_width * temp_height;
8680 }
8681 }
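    // The 3/2 factor budgets roughly 1.5 bytes per pixel (an assumption about
    // the worst-case JPEG size, carried over from the YUV420 footprint), plus
    // the camera3_jpeg_blob_t trailer; e.g. a 4000x3000 sensor yields
    // 12000000 * 3/2 + sizeof(camera3_jpeg_blob_t) bytes.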
8682 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8683 return max_jpeg_size;
8684}
8685
8686/*===========================================================================
8687 * FUNCTION : getMaxRawSize
8688 *
8689 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8690 *
8691 * PARAMETERS :
8692 * @camera_id : camera Id
8693 * RETURN : Largest supported Raw Dimension
8694 *==========================================================================*/
8695cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8696{
8697 int max_width = 0;
8698 cam_dimension_t maxRawSize;
8699
8700 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8701 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8702 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8703 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8704 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8705 }
8706 }
8707 return maxRawSize;
8708}
8709
8710
8711/*===========================================================================
8712 * FUNCTION : calcMaxJpegDim
8713 *
8714 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8715 *
8716 * PARAMETERS :
8717 *
8718 * RETURN : max_jpeg_dim
8719 *==========================================================================*/
8720cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8721{
8722 cam_dimension_t max_jpeg_dim;
8723 cam_dimension_t curr_jpeg_dim;
8724 max_jpeg_dim.width = 0;
8725 max_jpeg_dim.height = 0;
8726 curr_jpeg_dim.width = 0;
8727 curr_jpeg_dim.height = 0;
8728 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8729 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8730 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8731 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8732 max_jpeg_dim.width * max_jpeg_dim.height ) {
8733 max_jpeg_dim.width = curr_jpeg_dim.width;
8734 max_jpeg_dim.height = curr_jpeg_dim.height;
8735 }
8736 }
8737 return max_jpeg_dim;
8738}
8739
8740/*===========================================================================
8741 * FUNCTION : addStreamConfig
8742 *
8743 * DESCRIPTION: adds the stream configuration to the array
8744 *
8745 * PARAMETERS :
8746 * @available_stream_configs : pointer to stream configuration array
8747 * @scalar_format : scalar format
8748 * @dim : configuration dimension
8749 * @config_type : input or output configuration type
8750 *
8751 * RETURN : NONE
8752 *==========================================================================*/
8753void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8754 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8755{
8756 available_stream_configs.add(scalar_format);
8757 available_stream_configs.add(dim.width);
8758 available_stream_configs.add(dim.height);
8759 available_stream_configs.add(config_type);
8760}
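/*
 * Each call appends one four-int32 entry {format, width, height, direction}
 * to the flat array, matching the layout the framework expects for
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS; e.g. a 1920x1080
 * IMPLEMENTATION_DEFINED output stream contributes
 * {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1920, 1080,
 *  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}.
 */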
8761
8762/*===========================================================================
8763 * FUNCTION : supportBurstCapture
8764 *
8765 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8766 *
8767 * PARAMETERS :
8768 * @cameraId : camera Id
8769 *
8770 * RETURN : true if camera supports BURST_CAPTURE
8771 * false otherwise
8772 *==========================================================================*/
8773bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8774{
8775 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8776 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8777 const int32_t highResWidth = 3264;
8778 const int32_t highResHeight = 2448;
8779
8780 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8781 // Maximum resolution images cannot be captured at >= 10fps
8782 // -> not supporting BURST_CAPTURE
8783 return false;
8784 }
8785
8786 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8787 // Maximum resolution images can be captured at >= 20fps
8788 // --> supporting BURST_CAPTURE
8789 return true;
8790 }
8791
8792 // Find the smallest highRes resolution, or largest resolution if there is none
8793 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8794 MAX_SIZES_CNT);
8795 size_t highRes = 0;
8796 while ((highRes + 1 < totalCnt) &&
8797 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8798 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8799 highResWidth * highResHeight)) {
8800 highRes++;
8801 }
8802 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8803 return true;
8804 } else {
8805 return false;
8806 }
8807}
8808
8809/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008810 * FUNCTION : getPDStatIndex
8811 *
8812 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8813 *
8814 * PARAMETERS :
8815 * @caps : camera capabilities
8816 *
8817 * RETURN : int32_t type
8818 * non-negative - on success
8819 * -1 - on failure
8820 *==========================================================================*/
8821int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8822 if (nullptr == caps) {
8823 return -1;
8824 }
8825
8826 uint32_t metaRawCount = caps->meta_raw_channel_count;
8827 int32_t ret = -1;
8828 for (size_t i = 0; i < metaRawCount; i++) {
8829 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8830 ret = i;
8831 break;
8832 }
8833 }
8834
8835 return ret;
8836}
8837
8838/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008839 * FUNCTION : initStaticMetadata
8840 *
8841 * DESCRIPTION: initialize the static metadata
8842 *
8843 * PARAMETERS :
8844 * @cameraId : camera Id
8845 *
8846 * RETURN : int32_t type of status
8847 * 0 -- success
8848 * non-zero failure code
8849 *==========================================================================*/
8850int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8851{
8852 int rc = 0;
8853 CameraMetadata staticInfo;
8854 size_t count = 0;
8855 bool limitedDevice = false;
8856 char prop[PROPERTY_VALUE_MAX];
8857 bool supportBurst = false;
8858
8859 supportBurst = supportBurstCapture(cameraId);
8860
8861    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
8862     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
8863     * advertised as a LIMITED device */
8864 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8865 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8866 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8867 !supportBurst;
8868
8869 uint8_t supportedHwLvl = limitedDevice ?
8870 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008871#ifndef USE_HAL_3_3
8872 // LEVEL_3 - This device will support level 3.
8873 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8874#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008875 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008876#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008877
8878 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8879 &supportedHwLvl, 1);
8880
8881 bool facingBack = false;
8882 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8883 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8884 facingBack = true;
8885 }
8886 /*HAL 3 only*/
8887 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8888 &gCamCapability[cameraId]->min_focus_distance, 1);
8889
8890 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8891 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8892
8893 /*should be using focal lengths but sensor doesn't provide that info now*/
8894 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8895 &gCamCapability[cameraId]->focal_length,
8896 1);
8897
8898 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8899 gCamCapability[cameraId]->apertures,
8900 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8901
8902 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8903 gCamCapability[cameraId]->filter_densities,
8904 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8905
8906
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008907 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8908 size_t mode_count =
8909 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8910 for (size_t i = 0; i < mode_count; i++) {
8911 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008913 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008914 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008915
8916 int32_t lens_shading_map_size[] = {
8917 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8918 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8919 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8920 lens_shading_map_size,
8921 sizeof(lens_shading_map_size)/sizeof(int32_t));
8922
8923 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8924 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8925
8926 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8927 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8928
8929 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8930 &gCamCapability[cameraId]->max_frame_duration, 1);
8931
8932 camera_metadata_rational baseGainFactor = {
8933 gCamCapability[cameraId]->base_gain_factor.numerator,
8934 gCamCapability[cameraId]->base_gain_factor.denominator};
8935 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8936 &baseGainFactor, 1);
8937
8938 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8939 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8940
8941 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8942 gCamCapability[cameraId]->pixel_array_size.height};
8943 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8944 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8945
8946 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8947 gCamCapability[cameraId]->active_array_size.top,
8948 gCamCapability[cameraId]->active_array_size.width,
8949 gCamCapability[cameraId]->active_array_size.height};
8950 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8951 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8952
8953 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8954 &gCamCapability[cameraId]->white_level, 1);
8955
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008956 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8957 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8958 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008959 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008960 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008961
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008962#ifndef USE_HAL_3_3
8963 bool hasBlackRegions = false;
8964 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8965 LOGW("black_region_count: %d is bounded to %d",
8966 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8967 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8968 }
8969 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8970 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8971 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8972 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8973 }
8974 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8975 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8976 hasBlackRegions = true;
8977 }
8978#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008979 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8980 &gCamCapability[cameraId]->flash_charge_duration, 1);
8981
8982 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8983 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8984
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008985 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8986 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8987 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008988 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8989 &timestampSource, 1);
8990
Thierry Strudel54dc9782017-02-15 12:12:10 -08008991 //update histogram vendor data
8992 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008993 &gCamCapability[cameraId]->histogram_size, 1);
8994
Thierry Strudel54dc9782017-02-15 12:12:10 -08008995 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008996 &gCamCapability[cameraId]->max_histogram_count, 1);
8997
Shuzhen Wang14415f52016-11-16 18:26:18 -08008998 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
8999    // so that the app can request fewer bins than the maximum supported.
9000 std::vector<int32_t> histBins;
9001 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9002 histBins.push_back(maxHistBins);
9003 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9004 (maxHistBins & 0x1) == 0) {
9005 histBins.push_back(maxHistBins >> 1);
9006 maxHistBins >>= 1;
9007 }
9008 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9009 histBins.data(), histBins.size());
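    // Illustrative example (assuming MIN_CAM_HISTOGRAM_STATS_SIZE is 64):
    // with max_histogram_count = 256 the advertised bin counts would be
    // {256, 128, 64}; halving stops at the minimum size or at an odd count.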
9010
Thierry Strudel3d639192016-09-09 11:52:26 -07009011 int32_t sharpness_map_size[] = {
9012 gCamCapability[cameraId]->sharpness_map_size.width,
9013 gCamCapability[cameraId]->sharpness_map_size.height};
9014
9015 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9016 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9017
9018 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9019 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9020
Emilian Peev0f3c3162017-03-15 12:57:46 +00009021 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9022 if (0 <= indexPD) {
9023 // Advertise PD stats data as part of the Depth capabilities
9024 int32_t depthWidth =
9025 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9026 int32_t depthHeight =
9027 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9028 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9029 assert(0 < depthSamplesCount);
9030 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9031 &depthSamplesCount, 1);
9032
9033 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9034 depthHeight,
9035 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9036 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9037 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9038 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9039 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9040
9041 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9042 depthHeight, 33333333,
9043 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9044 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9045 depthMinDuration,
9046 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9047
9048 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9049 depthHeight, 0,
9050 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9051 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9052 depthStallDuration,
9053 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9054
9055 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9056 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9057 }
9058
Thierry Strudel3d639192016-09-09 11:52:26 -07009059 int32_t scalar_formats[] = {
9060 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9061 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9062 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9063 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9064 HAL_PIXEL_FORMAT_RAW10,
9065 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009066 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9067 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9068 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009069
9070 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9071 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9072 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9073 count, MAX_SIZES_CNT, available_processed_sizes);
9074 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9075 available_processed_sizes, count * 2);
9076
9077 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9078 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9079 makeTable(gCamCapability[cameraId]->raw_dim,
9080 count, MAX_SIZES_CNT, available_raw_sizes);
9081 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9082 available_raw_sizes, count * 2);
9083
9084 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9085 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9086 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9087 count, MAX_SIZES_CNT, available_fps_ranges);
9088 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9089 available_fps_ranges, count * 2);
9090
9091 camera_metadata_rational exposureCompensationStep = {
9092 gCamCapability[cameraId]->exp_compensation_step.numerator,
9093 gCamCapability[cameraId]->exp_compensation_step.denominator};
9094 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9095 &exposureCompensationStep, 1);
9096
9097 Vector<uint8_t> availableVstabModes;
9098 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9099 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009100 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009101 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009102 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009103 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009104 count = IS_TYPE_MAX;
9105 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9106 for (size_t i = 0; i < count; i++) {
9107 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9108 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9109 eisSupported = true;
9110 break;
9111 }
9112 }
9113 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009114 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9115 }
9116 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9117 availableVstabModes.array(), availableVstabModes.size());
9118
9119 /*HAL 1 and HAL 3 common*/
9120 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9121 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9122 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9123    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
9124 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9125 &maxZoom, 1);
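    // Illustrative example: zoom_ratio_tbl entries are expressed relative to
    // minZoomStep = 100 (the HAL1/API1 convention noted above), so a last
    // table entry of 400 advertises a maximum digital zoom of 4.0x.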
9126
9127 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9128 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9129
9130 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9131 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9132 max3aRegions[2] = 0; /* AF not supported */
9133 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9134 max3aRegions, 3);
9135
9136 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9137 memset(prop, 0, sizeof(prop));
9138 property_get("persist.camera.facedetect", prop, "1");
9139 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9140 LOGD("Support face detection mode: %d",
9141 supportedFaceDetectMode);
9142
9143 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009144    /* supported mode should be OFF if the max number of faces is 0 */
9145 if (maxFaces <= 0) {
9146 supportedFaceDetectMode = 0;
9147 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009148 Vector<uint8_t> availableFaceDetectModes;
9149 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9150 if (supportedFaceDetectMode == 1) {
9151 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9152 } else if (supportedFaceDetectMode == 2) {
9153 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9154 } else if (supportedFaceDetectMode == 3) {
9155 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9156 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9157 } else {
9158 maxFaces = 0;
9159 }
9160 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9161 availableFaceDetectModes.array(),
9162 availableFaceDetectModes.size());
9163 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9164 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009165 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9166 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9167 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009168
9169 int32_t exposureCompensationRange[] = {
9170 gCamCapability[cameraId]->exposure_compensation_min,
9171 gCamCapability[cameraId]->exposure_compensation_max};
9172 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9173 exposureCompensationRange,
9174 sizeof(exposureCompensationRange)/sizeof(int32_t));
9175
9176 uint8_t lensFacing = (facingBack) ?
9177 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9178 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9179
9180 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9181 available_thumbnail_sizes,
9182 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9183
9184 /*all sizes will be clubbed into this tag*/
9185 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9186 /*android.scaler.availableStreamConfigurations*/
9187 Vector<int32_t> available_stream_configs;
9188 cam_dimension_t active_array_dim;
9189 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9190 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009191
9192    /* Advertise the list of supported input dimensions based on the property below.
9193       By default all sizes up to 5MP will be advertised.
9194       Note that the setprop resolution format should be WxH,
9195       e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9196       To list all supported sizes, set the property to "0x0" */
9197 cam_dimension_t minInputSize = {2592,1944}; //5MP
9198 memset(prop, 0, sizeof(prop));
9199 property_get("persist.camera.input.minsize", prop, "2592x1944");
9200 if (strlen(prop) > 0) {
9201 char *saveptr = NULL;
9202 char *token = strtok_r(prop, "x", &saveptr);
9203 if (token != NULL) {
9204 minInputSize.width = atoi(token);
9205 }
9206 token = strtok_r(NULL, "x", &saveptr);
9207 if (token != NULL) {
9208 minInputSize.height = atoi(token);
9209 }
9210 }
9211
Thierry Strudel3d639192016-09-09 11:52:26 -07009212 /* Add input/output stream configurations for each scalar formats*/
9213 for (size_t j = 0; j < scalar_formats_count; j++) {
9214 switch (scalar_formats[j]) {
9215 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9216 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9217 case HAL_PIXEL_FORMAT_RAW10:
9218 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9219 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9220 addStreamConfig(available_stream_configs, scalar_formats[j],
9221 gCamCapability[cameraId]->raw_dim[i],
9222 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9223 }
9224 break;
9225 case HAL_PIXEL_FORMAT_BLOB:
9226 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9227 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9228 addStreamConfig(available_stream_configs, scalar_formats[j],
9229 gCamCapability[cameraId]->picture_sizes_tbl[i],
9230 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9231 }
9232 break;
9233 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9234 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9235 default:
9236 cam_dimension_t largest_picture_size;
9237 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9238 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9239 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9240 addStreamConfig(available_stream_configs, scalar_formats[j],
9241 gCamCapability[cameraId]->picture_sizes_tbl[i],
9242 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009243            /* For the two formats below we also support input streams for reprocessing; advertise those */
9244 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9245 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9246 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9247 >= minInputSize.width) || (gCamCapability[cameraId]->
9248 picture_sizes_tbl[i].height >= minInputSize.height)) {
9249 addStreamConfig(available_stream_configs, scalar_formats[j],
9250 gCamCapability[cameraId]->picture_sizes_tbl[i],
9251 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9252 }
9253 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009254 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009255
Thierry Strudel3d639192016-09-09 11:52:26 -07009256 break;
9257 }
9258 }
9259
9260 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9261 available_stream_configs.array(), available_stream_configs.size());
9262 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9263 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9264
9265 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9266 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9267
9268 /* android.scaler.availableMinFrameDurations */
9269 Vector<int64_t> available_min_durations;
9270 for (size_t j = 0; j < scalar_formats_count; j++) {
9271 switch (scalar_formats[j]) {
9272 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9273 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9274 case HAL_PIXEL_FORMAT_RAW10:
9275 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9276 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9277 available_min_durations.add(scalar_formats[j]);
9278 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9279 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9280 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9281 }
9282 break;
9283 default:
9284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9285 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9286 available_min_durations.add(scalar_formats[j]);
9287 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9288 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9289 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9290 }
9291 break;
9292 }
9293 }
9294 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9295 available_min_durations.array(), available_min_durations.size());
9296
9297 Vector<int32_t> available_hfr_configs;
9298 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9299 int32_t fps = 0;
9300 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9301 case CAM_HFR_MODE_60FPS:
9302 fps = 60;
9303 break;
9304 case CAM_HFR_MODE_90FPS:
9305 fps = 90;
9306 break;
9307 case CAM_HFR_MODE_120FPS:
9308 fps = 120;
9309 break;
9310 case CAM_HFR_MODE_150FPS:
9311 fps = 150;
9312 break;
9313 case CAM_HFR_MODE_180FPS:
9314 fps = 180;
9315 break;
9316 case CAM_HFR_MODE_210FPS:
9317 fps = 210;
9318 break;
9319 case CAM_HFR_MODE_240FPS:
9320 fps = 240;
9321 break;
9322 case CAM_HFR_MODE_480FPS:
9323 fps = 480;
9324 break;
9325 case CAM_HFR_MODE_OFF:
9326 case CAM_HFR_MODE_MAX:
9327 default:
9328 break;
9329 }
9330
9331 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9332 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9333            /* For each HFR frame rate, advertise one variable fps range and one
9334             * fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9335             * and [120, 120]. While camcorder preview alone is running, the app sets
9336             * [30, 120]. When video recording is started, [120, 120] is set. This
9337             * way the sensor configuration does not change when recording is
9338             * started. */
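            /* Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080
             * 240 fps HFR entry expands to (1920, 1080, 30, 240, 8) and
             * (1920, 1080, 240, 240, 8). */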
9339
9340 /* (width, height, fps_min, fps_max, batch_size_max) */
9341 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9342 j < MAX_SIZES_CNT; j++) {
9343 available_hfr_configs.add(
9344 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9345 available_hfr_configs.add(
9346 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9347 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9348 available_hfr_configs.add(fps);
9349 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9350
9351 /* (width, height, fps_min, fps_max, batch_size_max) */
9352 available_hfr_configs.add(
9353 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9354 available_hfr_configs.add(
9355 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9356 available_hfr_configs.add(fps);
9357 available_hfr_configs.add(fps);
9358 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9359 }
9360 }
9361 }
9362 //Advertise HFR capability only if the property is set
9363 memset(prop, 0, sizeof(prop));
9364 property_get("persist.camera.hal3hfr.enable", prop, "1");
9365 uint8_t hfrEnable = (uint8_t)atoi(prop);
9366
9367 if(hfrEnable && available_hfr_configs.array()) {
9368 staticInfo.update(
9369 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9370 available_hfr_configs.array(), available_hfr_configs.size());
9371 }
9372
9373 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9374 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9375 &max_jpeg_size, 1);
9376
9377 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9378 size_t size = 0;
9379 count = CAM_EFFECT_MODE_MAX;
9380 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9381 for (size_t i = 0; i < count; i++) {
9382 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9383 gCamCapability[cameraId]->supported_effects[i]);
9384 if (NAME_NOT_FOUND != val) {
9385 avail_effects[size] = (uint8_t)val;
9386 size++;
9387 }
9388 }
9389 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9390 avail_effects,
9391 size);
9392
9393 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9394 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9395 size_t supported_scene_modes_cnt = 0;
9396 count = CAM_SCENE_MODE_MAX;
9397 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9398 for (size_t i = 0; i < count; i++) {
9399 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9400 CAM_SCENE_MODE_OFF) {
9401 int val = lookupFwkName(SCENE_MODES_MAP,
9402 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9403 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009404
Thierry Strudel3d639192016-09-09 11:52:26 -07009405 if (NAME_NOT_FOUND != val) {
9406 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9407 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9408 supported_scene_modes_cnt++;
9409 }
9410 }
9411 }
9412 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9413 avail_scene_modes,
9414 supported_scene_modes_cnt);
9415
9416 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9417 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9418 supported_scene_modes_cnt,
9419 CAM_SCENE_MODE_MAX,
9420 scene_mode_overrides,
9421 supported_indexes,
9422 cameraId);
9423
9424 if (supported_scene_modes_cnt == 0) {
9425 supported_scene_modes_cnt = 1;
9426 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9427 }
9428
9429 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9430 scene_mode_overrides, supported_scene_modes_cnt * 3);
9431
9432 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9433 ANDROID_CONTROL_MODE_AUTO,
9434 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9435 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9436 available_control_modes,
9437 3);
9438
9439 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9440 size = 0;
9441 count = CAM_ANTIBANDING_MODE_MAX;
9442 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9443 for (size_t i = 0; i < count; i++) {
9444 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9445 gCamCapability[cameraId]->supported_antibandings[i]);
9446 if (NAME_NOT_FOUND != val) {
9447 avail_antibanding_modes[size] = (uint8_t)val;
9448 size++;
9449 }
9450
9451 }
9452 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9453 avail_antibanding_modes,
9454 size);
9455
9456 uint8_t avail_abberation_modes[] = {
9457 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9458 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9459 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9460 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9461 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9462 if (0 == count) {
9463        // If no aberration correction modes are available for a device, advertise only the OFF mode
9464 size = 1;
9465 } else {
9466        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9467        // so advertise all 3 modes if at least one mode is supported, as per the
9468        // new M requirement
9469 size = 3;
9470 }
9471 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9472 avail_abberation_modes,
9473 size);
9474
9475 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9476 size = 0;
9477 count = CAM_FOCUS_MODE_MAX;
9478 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9479 for (size_t i = 0; i < count; i++) {
9480 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9481 gCamCapability[cameraId]->supported_focus_modes[i]);
9482 if (NAME_NOT_FOUND != val) {
9483 avail_af_modes[size] = (uint8_t)val;
9484 size++;
9485 }
9486 }
9487 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9488 avail_af_modes,
9489 size);
9490
9491 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9492 size = 0;
9493 count = CAM_WB_MODE_MAX;
9494 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9495 for (size_t i = 0; i < count; i++) {
9496 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9497 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9498 gCamCapability[cameraId]->supported_white_balances[i]);
9499 if (NAME_NOT_FOUND != val) {
9500 avail_awb_modes[size] = (uint8_t)val;
9501 size++;
9502 }
9503 }
9504 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9505 avail_awb_modes,
9506 size);
9507
9508 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9509 count = CAM_FLASH_FIRING_LEVEL_MAX;
9510 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9511 count);
9512 for (size_t i = 0; i < count; i++) {
9513 available_flash_levels[i] =
9514 gCamCapability[cameraId]->supported_firing_levels[i];
9515 }
9516 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9517 available_flash_levels, count);
9518
9519 uint8_t flashAvailable;
9520 if (gCamCapability[cameraId]->flash_available)
9521 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9522 else
9523 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9524 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9525 &flashAvailable, 1);
9526
9527 Vector<uint8_t> avail_ae_modes;
9528 count = CAM_AE_MODE_MAX;
9529 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9530 for (size_t i = 0; i < count; i++) {
9531 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9532 }
9533 if (flashAvailable) {
9534 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9535 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009536 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009537 }
9538 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9539 avail_ae_modes.array(),
9540 avail_ae_modes.size());
9541
9542 int32_t sensitivity_range[2];
9543 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9544 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9545 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9546 sensitivity_range,
9547 sizeof(sensitivity_range) / sizeof(int32_t));
9548
9549 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9550 &gCamCapability[cameraId]->max_analog_sensitivity,
9551 1);
9552
9553 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9554 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9555 &sensor_orientation,
9556 1);
9557
9558 int32_t max_output_streams[] = {
9559 MAX_STALLING_STREAMS,
9560 MAX_PROCESSED_STREAMS,
9561 MAX_RAW_STREAMS};
9562 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9563 max_output_streams,
9564 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9565
9566 uint8_t avail_leds = 0;
9567 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9568 &avail_leds, 0);
9569
9570 uint8_t focus_dist_calibrated;
9571 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9572 gCamCapability[cameraId]->focus_dist_calibrated);
9573 if (NAME_NOT_FOUND != val) {
9574 focus_dist_calibrated = (uint8_t)val;
9575 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9576 &focus_dist_calibrated, 1);
9577 }
9578
9579 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9580 size = 0;
9581 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9582 MAX_TEST_PATTERN_CNT);
9583 for (size_t i = 0; i < count; i++) {
9584 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9585 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9586 if (NAME_NOT_FOUND != testpatternMode) {
9587 avail_testpattern_modes[size] = testpatternMode;
9588 size++;
9589 }
9590 }
9591 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9592 avail_testpattern_modes,
9593 size);
9594
9595 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9596 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9597 &max_pipeline_depth,
9598 1);
9599
9600 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9601 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9602 &partial_result_count,
9603 1);
9604
9605 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9606 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9607
9608 Vector<uint8_t> available_capabilities;
9609 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9610 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9611 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9612 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9613 if (supportBurst) {
9614 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9615 }
9616 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9617 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9618 if (hfrEnable && available_hfr_configs.array()) {
9619 available_capabilities.add(
9620 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9621 }
9622
9623 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9624 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9625 }
9626 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9627 available_capabilities.array(),
9628 available_capabilities.size());
9629
9630    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9631 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9632 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9633 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9634
9635 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9636 &aeLockAvailable, 1);
9637
9638    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9639 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9640 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9641 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9642
9643 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9644 &awbLockAvailable, 1);
9645
9646 int32_t max_input_streams = 1;
9647 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9648 &max_input_streams,
9649 1);
9650
9651    /* Format of the map: input format, num_output_formats, outputFormat1, ..., outputFormatN */
9652 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9653 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9654 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9655 HAL_PIXEL_FORMAT_YCbCr_420_888};
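    /* i.e. both IMPLEMENTATION_DEFINED and YCbCr_420_888 input streams can be
     * reprocessed into BLOB or YCbCr_420_888 outputs. */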
9656 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9657 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9658
9659 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9660 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9661 &max_latency,
9662 1);
9663
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009664#ifndef USE_HAL_3_3
9665 int32_t isp_sensitivity_range[2];
9666 isp_sensitivity_range[0] =
9667 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9668 isp_sensitivity_range[1] =
9669 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9670 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9671 isp_sensitivity_range,
9672 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9673#endif
9674
Thierry Strudel3d639192016-09-09 11:52:26 -07009675 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9676 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9677 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9678 available_hot_pixel_modes,
9679 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9680
9681 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9682 ANDROID_SHADING_MODE_FAST,
9683 ANDROID_SHADING_MODE_HIGH_QUALITY};
9684 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9685 available_shading_modes,
9686 3);
9687
9688 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9689 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9690 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9691 available_lens_shading_map_modes,
9692 2);
9693
9694 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9695 ANDROID_EDGE_MODE_FAST,
9696 ANDROID_EDGE_MODE_HIGH_QUALITY,
9697 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9698 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9699 available_edge_modes,
9700 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9701
9702 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9703 ANDROID_NOISE_REDUCTION_MODE_FAST,
9704 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9705 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9706 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9707 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9708 available_noise_red_modes,
9709 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9710
9711 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9712 ANDROID_TONEMAP_MODE_FAST,
9713 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9714 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9715 available_tonemap_modes,
9716 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9717
9718 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9719 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9720 available_hot_pixel_map_modes,
9721 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9722
9723 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9724 gCamCapability[cameraId]->reference_illuminant1);
9725 if (NAME_NOT_FOUND != val) {
9726 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9727 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9728 }
9729
9730 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9731 gCamCapability[cameraId]->reference_illuminant2);
9732 if (NAME_NOT_FOUND != val) {
9733 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9734 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9735 }
9736
9737 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9738 (void *)gCamCapability[cameraId]->forward_matrix1,
9739 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9740
9741 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9742 (void *)gCamCapability[cameraId]->forward_matrix2,
9743 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9744
9745 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9746 (void *)gCamCapability[cameraId]->color_transform1,
9747 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9748
9749 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9750 (void *)gCamCapability[cameraId]->color_transform2,
9751 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9752
9753 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9754 (void *)gCamCapability[cameraId]->calibration_transform1,
9755 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9756
9757 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9758 (void *)gCamCapability[cameraId]->calibration_transform2,
9759 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9760
9761 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9762 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9763 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9764 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9765 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9766 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9767 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9768 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9769 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9770 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9771 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9772 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9773 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9774 ANDROID_JPEG_GPS_COORDINATES,
9775 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9776 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9777 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9778 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9779 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9780 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9781 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9782 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9783 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9784 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009785#ifndef USE_HAL_3_3
9786 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9787#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009788 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009789 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009790 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9791 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009792 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009793 /* DevCamDebug metadata request_keys_basic */
9794 DEVCAMDEBUG_META_ENABLE,
9795 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009796 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9797 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009798 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009799
9800 size_t request_keys_cnt =
9801 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9802 Vector<int32_t> available_request_keys;
9803 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9804 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9805 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9806 }
9807
9808 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9809 available_request_keys.array(), available_request_keys.size());
9810
9811 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9812 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9813 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9814 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9815 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9816 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9817 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9818 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9819 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9820 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9821 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9822 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9823 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9824 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9825 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9826 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9827 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009828 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009829 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9830 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9831 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009832 ANDROID_STATISTICS_FACE_SCORES,
9833#ifndef USE_HAL_3_3
9834 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9835#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009836 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009837 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009838 // DevCamDebug metadata result_keys_basic
9839 DEVCAMDEBUG_META_ENABLE,
9840 // DevCamDebug metadata result_keys AF
9841 DEVCAMDEBUG_AF_LENS_POSITION,
9842 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9843 DEVCAMDEBUG_AF_TOF_DISTANCE,
9844 DEVCAMDEBUG_AF_LUMA,
9845 DEVCAMDEBUG_AF_HAF_STATE,
9846 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9847 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9848 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9849 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9850 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9851 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9852 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9853 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9854 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9855 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9856 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9857 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9858 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9859 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9860 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9861 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9862 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9863 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9864 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9865 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9866 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9867 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9868 // DevCamDebug metadata result_keys AEC
9869 DEVCAMDEBUG_AEC_TARGET_LUMA,
9870 DEVCAMDEBUG_AEC_COMP_LUMA,
9871 DEVCAMDEBUG_AEC_AVG_LUMA,
9872 DEVCAMDEBUG_AEC_CUR_LUMA,
9873 DEVCAMDEBUG_AEC_LINECOUNT,
9874 DEVCAMDEBUG_AEC_REAL_GAIN,
9875 DEVCAMDEBUG_AEC_EXP_INDEX,
9876 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009877 // DevCamDebug metadata result_keys zzHDR
9878 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9879 DEVCAMDEBUG_AEC_L_LINECOUNT,
9880 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9881 DEVCAMDEBUG_AEC_S_LINECOUNT,
9882 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9883 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9884 // DevCamDebug metadata result_keys ADRC
9885 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9886 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9887 DEVCAMDEBUG_AEC_GTM_RATIO,
9888 DEVCAMDEBUG_AEC_LTM_RATIO,
9889 DEVCAMDEBUG_AEC_LA_RATIO,
9890 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009891 // DevCamDebug metadata result_keys AWB
9892 DEVCAMDEBUG_AWB_R_GAIN,
9893 DEVCAMDEBUG_AWB_G_GAIN,
9894 DEVCAMDEBUG_AWB_B_GAIN,
9895 DEVCAMDEBUG_AWB_CCT,
9896 DEVCAMDEBUG_AWB_DECISION,
9897 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009898 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9899 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9900 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009901 };
9902
Thierry Strudel3d639192016-09-09 11:52:26 -07009903 size_t result_keys_cnt =
9904 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9905
9906 Vector<int32_t> available_result_keys;
9907 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9908 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9909 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9910 }
9911 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9912 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9913 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9914 }
9915 if (supportedFaceDetectMode == 1) {
9916 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9917 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9918 } else if ((supportedFaceDetectMode == 2) ||
9919 (supportedFaceDetectMode == 3)) {
9920 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9921 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009923#ifndef USE_HAL_3_3
9924 if (hasBlackRegions) {
9925 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9926 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9927 }
9928#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009929 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9930 available_result_keys.array(), available_result_keys.size());
9931
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009932 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009933 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9934 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9935 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9936 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9937 ANDROID_SCALER_CROPPING_TYPE,
9938 ANDROID_SYNC_MAX_LATENCY,
9939 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9940 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9941 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9942 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9943 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9944 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9945 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9946 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9947 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9948 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9949 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9950 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9951 ANDROID_LENS_FACING,
9952 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9953 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9954 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9955 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9956 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9957 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9958 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9959 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9960 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9961 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9962 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9963 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9964 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9965 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9966 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9967 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9968 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9969 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9970 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9971 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009972 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009973 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9974 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9975 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9976 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9977 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9978 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9979 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9980 ANDROID_CONTROL_AVAILABLE_MODES,
9981 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9982 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9983 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9984 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009985 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9986#ifndef USE_HAL_3_3
9987 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9988 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9989#endif
9990 };
9991
9992 Vector<int32_t> available_characteristics_keys;
9993 available_characteristics_keys.appendArray(characteristics_keys_basic,
9994 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9995#ifndef USE_HAL_3_3
9996 if (hasBlackRegions) {
9997 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9998 }
9999#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010000
10001 if (0 <= indexPD) {
10002 int32_t depthKeys[] = {
10003 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10004 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10005 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10006 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10007 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10008 };
10009 available_characteristics_keys.appendArray(depthKeys,
10010 sizeof(depthKeys) / sizeof(depthKeys[0]));
10011 }
10012
Thierry Strudel3d639192016-09-09 11:52:26 -070010013 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010014 available_characteristics_keys.array(),
10015 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010016
10017 /*available stall durations depend on the hw + sw and will be different for different devices */
10018 /*have to add for raw after implementation*/
10019 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10020 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10021
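    /* Entries are packed as (format, width, height, stall duration in ns) quads,
     * one quad per supported size of each stalling format. */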
10022 Vector<int64_t> available_stall_durations;
10023 for (uint32_t j = 0; j < stall_formats_count; j++) {
10024 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10025 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10026 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10027 available_stall_durations.add(stall_formats[j]);
10028 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10029 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10030 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10031 }
10032 } else {
10033 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10034 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10035 available_stall_durations.add(stall_formats[j]);
10036 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10037 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10038 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10039 }
10040 }
10041 }
10042 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10043 available_stall_durations.array(),
10044 available_stall_durations.size());
10045
10046 //QCAMERA3_OPAQUE_RAW
10047 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10048 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10049 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10050 case LEGACY_RAW:
10051 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10052 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10053 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10054 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10055 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10056 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10057 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10058 break;
10059 case MIPI_RAW:
10060 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10061 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10062 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10063 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10064 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10065 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10066 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10067 break;
10068 default:
10069 LOGE("unknown opaque_raw_format %d",
10070 gCamCapability[cameraId]->opaque_raw_fmt);
10071 break;
10072 }
10073 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10074
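    /* QCAMERA3_OPAQUE_RAW_STRIDES entries are packed as (width, height, stride)
     * triplets, one per supported raw dimension. */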
10075 Vector<int32_t> strides;
10076 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10077 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10078 cam_stream_buf_plane_info_t buf_planes;
10079 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10080 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10081 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10082 &gCamCapability[cameraId]->padding_info, &buf_planes);
10083 strides.add(buf_planes.plane_info.mp[0].stride);
10084 }
10085 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10086 strides.size());
10087
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010088 //TBD: remove the following line once backend advertises zzHDR in feature mask
10089 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010090 //Video HDR default
10091 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10092 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010093 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010094 int32_t vhdr_mode[] = {
10095 QCAMERA3_VIDEO_HDR_MODE_OFF,
10096 QCAMERA3_VIDEO_HDR_MODE_ON};
10097
10098 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10099 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10100 vhdr_mode, vhdr_mode_count);
10101 }
10102
Thierry Strudel3d639192016-09-09 11:52:26 -070010103 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10104 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10105 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10106
10107 uint8_t isMonoOnly =
10108 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10109 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10110 &isMonoOnly, 1);
10111
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010112#ifndef USE_HAL_3_3
10113 Vector<int32_t> opaque_size;
10114 for (size_t j = 0; j < scalar_formats_count; j++) {
10115 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10116 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10117 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10118 cam_stream_buf_plane_info_t buf_planes;
10119
10120 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10121 &gCamCapability[cameraId]->padding_info, &buf_planes);
10122
10123 if (rc == 0) {
10124 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10125 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10126 opaque_size.add(buf_planes.plane_info.frame_len);
10127                } else {
10128 LOGE("raw frame calculation failed!");
10129 }
10130 }
10131 }
10132 }
10133
10134 if ((opaque_size.size() > 0) &&
10135 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10136 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10137 else
10138        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10139#endif
10140
Thierry Strudel04e026f2016-10-10 11:27:36 -070010141 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10142 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10143 size = 0;
10144 count = CAM_IR_MODE_MAX;
10145 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10146 for (size_t i = 0; i < count; i++) {
10147 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10148 gCamCapability[cameraId]->supported_ir_modes[i]);
10149 if (NAME_NOT_FOUND != val) {
10150 avail_ir_modes[size] = (int32_t)val;
10151 size++;
10152 }
10153 }
10154 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10155 avail_ir_modes, size);
10156 }
10157
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010158 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10159 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10160 size = 0;
10161 count = CAM_AEC_CONVERGENCE_MAX;
10162 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10163 for (size_t i = 0; i < count; i++) {
10164 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10165 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10166 if (NAME_NOT_FOUND != val) {
10167 available_instant_aec_modes[size] = (int32_t)val;
10168 size++;
10169 }
10170 }
10171 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10172 available_instant_aec_modes, size);
10173 }
10174
Thierry Strudel54dc9782017-02-15 12:12:10 -080010175 int32_t sharpness_range[] = {
10176 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10177 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10178 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10179
10180 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10181 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10182 size = 0;
10183 count = CAM_BINNING_CORRECTION_MODE_MAX;
10184 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10185 for (size_t i = 0; i < count; i++) {
10186 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10187 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10188 gCamCapability[cameraId]->supported_binning_modes[i]);
10189 if (NAME_NOT_FOUND != val) {
10190 avail_binning_modes[size] = (int32_t)val;
10191 size++;
10192 }
10193 }
10194 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10195 avail_binning_modes, size);
10196 }
10197
10198 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10199 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10200 size = 0;
10201 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10202 for (size_t i = 0; i < count; i++) {
10203 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10204 gCamCapability[cameraId]->supported_aec_modes[i]);
10205 if (NAME_NOT_FOUND != val)
10206 available_aec_modes[size++] = val;
10207 }
10208 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10209 available_aec_modes, size);
10210 }
10211
10212 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10213 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10214 size = 0;
10215 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10216 for (size_t i = 0; i < count; i++) {
10217 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10218 gCamCapability[cameraId]->supported_iso_modes[i]);
10219 if (NAME_NOT_FOUND != val)
10220 available_iso_modes[size++] = val;
10221 }
10222 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10223 available_iso_modes, size);
10224 }
10225
10226 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10227    for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10228 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10229 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10230 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10231
10232 int32_t available_saturation_range[4];
10233 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10234 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10235 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10236 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10237 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10238 available_saturation_range, 4);
10239
10240 uint8_t is_hdr_values[2];
10241 is_hdr_values[0] = 0;
10242 is_hdr_values[1] = 1;
10243 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10244 is_hdr_values, 2);
10245
10246 float is_hdr_confidence_range[2];
10247 is_hdr_confidence_range[0] = 0.0;
10248 is_hdr_confidence_range[1] = 1.0;
10249 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10250 is_hdr_confidence_range, 2);
10251
Emilian Peev0a972ef2017-03-16 10:25:53 +000010252 size_t eepromLength = strnlen(
10253 reinterpret_cast<const char *>(
10254 gCamCapability[cameraId]->eeprom_version_info),
10255 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10256 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010257 char easelInfo[] = ",E:N";
10258 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10259 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10260 eepromLength += sizeof(easelInfo);
10261 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10262 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010263 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10264 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10265 }
10266
Thierry Strudel3d639192016-09-09 11:52:26 -070010267 gStaticMetadata[cameraId] = staticInfo.release();
10268 return rc;
10269}
10270
10271/*===========================================================================
10272 * FUNCTION : makeTable
10273 *
10274 * DESCRIPTION: make a table of sizes
10275 *
10276 * PARAMETERS :
10277 *
10278 *
10279 *==========================================================================*/
10280void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10281 size_t max_size, int32_t *sizeTable)
10282{
10283 size_t j = 0;
10284 if (size > max_size) {
10285 size = max_size;
10286 }
10287 for (size_t i = 0; i < size; i++) {
10288 sizeTable[j] = dimTable[i].width;
10289 sizeTable[j+1] = dimTable[i].height;
10290 j+=2;
10291 }
10292}
10293
10294/*===========================================================================
10295 * FUNCTION : makeFPSTable
10296 *
10297 * DESCRIPTION: make a table of fps ranges
10298 *
10299 * PARAMETERS :
10300 *
10301 *==========================================================================*/
10302void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10303 size_t max_size, int32_t *fpsRangesTable)
10304{
10305 size_t j = 0;
10306 if (size > max_size) {
10307 size = max_size;
10308 }
10309 for (size_t i = 0; i < size; i++) {
10310 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10311 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10312 j+=2;
10313 }
10314}
10315
10316/*===========================================================================
10317 * FUNCTION : makeOverridesList
10318 *
10319 * DESCRIPTION: make a list of scene mode overrides
10320 *
10321 * PARAMETERS :
10322 *
10323 *
10324 *==========================================================================*/
10325void QCamera3HardwareInterface::makeOverridesList(
10326 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10327 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10328{
10329    /* The daemon will give a list of overrides for all scene modes.
10330       However, we should send the framework only the overrides for the
10331       scene modes it supports. */
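    /* The output list is packed as (ae_mode, awb_mode, af_mode) triplets, one
     * triplet per supported scene mode. */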
10332 size_t j = 0;
10333 if (size > max_size) {
10334 size = max_size;
10335 }
10336 size_t focus_count = CAM_FOCUS_MODE_MAX;
10337 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10338 focus_count);
10339 for (size_t i = 0; i < size; i++) {
10340 bool supt = false;
10341 size_t index = supported_indexes[i];
10342 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10343 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10344 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10345 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10346 overridesTable[index].awb_mode);
10347 if (NAME_NOT_FOUND != val) {
10348 overridesList[j+1] = (uint8_t)val;
10349 }
10350 uint8_t focus_override = overridesTable[index].af_mode;
10351 for (size_t k = 0; k < focus_count; k++) {
10352 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10353 supt = true;
10354 break;
10355 }
10356 }
10357 if (supt) {
10358 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10359 focus_override);
10360 if (NAME_NOT_FOUND != val) {
10361 overridesList[j+2] = (uint8_t)val;
10362 }
10363 } else {
10364 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10365 }
10366 j+=3;
10367 }
10368}
10369
10370/*===========================================================================
10371 * FUNCTION : filterJpegSizes
10372 *
10373 * DESCRIPTION: Returns the supported jpeg sizes: processed sizes no smaller than
10374 *              the active array size divided by the maximum downscale factor
10375 *
10376 * PARAMETERS :
10377 *
10378 * RETURN : length of jpegSizes array
10379 *==========================================================================*/
10380
10381size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10382 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10383 uint8_t downscale_factor)
10384{
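    /* Illustrative example: with an active array of 4000x3000 and a downscale
     * factor of 2, only processed sizes of at least 2000x1500 are copied into
     * jpegSizes. */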
10385 if (0 == downscale_factor) {
10386 downscale_factor = 1;
10387 }
10388
10389 int32_t min_width = active_array_size.width / downscale_factor;
10390 int32_t min_height = active_array_size.height / downscale_factor;
10391 size_t jpegSizesCnt = 0;
10392 if (processedSizesCnt > maxCount) {
10393 processedSizesCnt = maxCount;
10394 }
10395 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10396 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10397 jpegSizes[jpegSizesCnt] = processedSizes[i];
10398 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10399 jpegSizesCnt += 2;
10400 }
10401 }
10402 return jpegSizesCnt;
10403}
10404
10405/*===========================================================================
10406 * FUNCTION : computeNoiseModelEntryS
10407 *
10408 * DESCRIPTION: function to map a given sensitivity to the S noise
10409 * model parameters in the DNG noise model.
10410 *
10411 * PARAMETERS : sens : the sensor sensitivity
10412 *
10413 * RETURN     : S (sensor amplification) noise
10414 *
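 * In the DNG noise profile, the noise variance at signal level x is modeled
 * as S * x + O. Worked example with hypothetical tuning values: gradient_S =
 * 3.74e-06, offset_S = 3.0e-06 and sens = 400 give
 * S = 3.74e-06 * 400 + 3.0e-06, i.e. roughly 1.5e-03.
 *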
10415 *==========================================================================*/
10416double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10417 double s = gCamCapability[mCameraId]->gradient_S * sens +
10418 gCamCapability[mCameraId]->offset_S;
10419 return ((s < 0.0) ? 0.0 : s);
10420}
10421
10422/*===========================================================================
10423 * FUNCTION : computeNoiseModelEntryO
10424 *
10425 * DESCRIPTION: function to map a given sensitivity to the O noise
10426 * model parameters in the DNG noise model.
10427 *
10428 * PARAMETERS : sens : the sensor sensitivity
10429 *
10430 * RETURN     : O (sensor readout) noise
10431 *
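 * Worked example with hypothetical tuning values: gradient_O = 4.5e-13,
 * offset_O = 3.0e-08, max_analog_sensitivity = 800 and sens = 1600 give
 * digital_gain = 2.0 and O = 4.5e-13 * 1600^2 + 3.0e-08 * 2.0^2, i.e.
 * roughly 1.3e-06.
 *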
10432 *==========================================================================*/
10433double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10434 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10435 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10436 1.0 : (1.0 * sens / max_analog_sens);
10437 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10438 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10439 return ((o < 0.0) ? 0.0 : o);
10440}
10441
10442/*===========================================================================
10443 * FUNCTION : getSensorSensitivity
10444 *
10445 * DESCRIPTION: convert iso_mode to an integer value
10446 *
10447 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10448 *
10449 * RETURN     : sensitivity supported by sensor
10450 *
10451 *==========================================================================*/
10452int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10453{
10454 int32_t sensitivity;
10455
10456 switch (iso_mode) {
10457 case CAM_ISO_MODE_100:
10458 sensitivity = 100;
10459 break;
10460 case CAM_ISO_MODE_200:
10461 sensitivity = 200;
10462 break;
10463 case CAM_ISO_MODE_400:
10464 sensitivity = 400;
10465 break;
10466 case CAM_ISO_MODE_800:
10467 sensitivity = 800;
10468 break;
10469 case CAM_ISO_MODE_1600:
10470 sensitivity = 1600;
10471 break;
10472 default:
10473 sensitivity = -1;
10474 break;
10475 }
10476 return sensitivity;
10477}
10478
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010479int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10480 if (gHdrPlusClient != nullptr) {
10481 return OK;
10482 }
10483
10484 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10485 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010486 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10487 // to connect to Easel.
10488 bool doNotpowerOnEasel =
10489 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10490
10491 if (doNotpowerOnEasel) {
10492 gHdrPlusClient = nullptr;
10493 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10494 return OK;
10495 }
10496
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010497 // If Easel is present, power on Easel and suspend it immediately.
10498 status_t res = gHdrPlusClient->powerOnEasel();
10499 if (res != OK) {
10500 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10501 gHdrPlusClient = nullptr;
10502 return res;
10503 }
10504
10505 res = gHdrPlusClient->suspendEasel();
10506 if (res != OK) {
10507 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10508 }
10509
10510 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10511 } else {
10512 // Destroy HDR+ client if Easel isn't present.
10513 gHdrPlusClient = nullptr;
10514 }
10515
10516 return OK;
10517}
10518
Thierry Strudel3d639192016-09-09 11:52:26 -070010519/*===========================================================================
10520 * FUNCTION : getCamInfo
10521 *
10522 * DESCRIPTION: query camera capabilities
10523 *
10524 * PARAMETERS :
10525 * @cameraId : camera Id
10526 * @info : camera info struct to be filled in with camera capabilities
10527 *
10528 * RETURN : int type of status
10529 * NO_ERROR -- success
10530 * none-zero failure code
10531 *              non-zero failure code
10532int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10533 struct camera_info *info)
10534{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010535 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010536 int rc = 0;
10537
10538 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010539
10540 rc = initHdrPlusClientLocked();
10541 if (rc != OK) {
10542 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10543 pthread_mutex_unlock(&gCamLock);
10544 return rc;
10545 }
10546
Thierry Strudel3d639192016-09-09 11:52:26 -070010547 if (NULL == gCamCapability[cameraId]) {
10548 rc = initCapabilities(cameraId);
10549 if (rc < 0) {
10550 pthread_mutex_unlock(&gCamLock);
10551 return rc;
10552 }
10553 }
10554
10555 if (NULL == gStaticMetadata[cameraId]) {
10556 rc = initStaticMetadata(cameraId);
10557 if (rc < 0) {
10558 pthread_mutex_unlock(&gCamLock);
10559 return rc;
10560 }
10561 }
10562
10563 switch(gCamCapability[cameraId]->position) {
10564 case CAM_POSITION_BACK:
10565 case CAM_POSITION_BACK_AUX:
10566 info->facing = CAMERA_FACING_BACK;
10567 break;
10568
10569 case CAM_POSITION_FRONT:
10570 case CAM_POSITION_FRONT_AUX:
10571 info->facing = CAMERA_FACING_FRONT;
10572 break;
10573
10574 default:
10575 LOGE("Unknown position type %d for camera id:%d",
10576 gCamCapability[cameraId]->position, cameraId);
10577 rc = -1;
10578 break;
10579 }
10580
10581
10582 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010583#ifndef USE_HAL_3_3
10584 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10585#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010586 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010587#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010588 info->static_camera_characteristics = gStaticMetadata[cameraId];
10589
10590 //For now assume both cameras can operate independently.
10591 info->conflicting_devices = NULL;
10592 info->conflicting_devices_length = 0;
10593
10594 //resource cost is 100 * MIN(1.0, m/M),
10595 //where m is throughput requirement with maximum stream configuration
10596 //and M is CPP maximum throughput.
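    //Illustrative only (assumed, made-up numbers): a 4000x3000 active array at a
    //maximum of 30 fps with 3 processed streams needs 3 * 4000 * 3000 * 30 = 1.08e9
    //pixels/s; against an assumed CPP bandwidth of 1.2e9 pixels/s the ratio is 0.9,
    //giving a resource cost of 100 * 0.9 = 90.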
10597 float max_fps = 0.0;
10598 for (uint32_t i = 0;
10599 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10600 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10601 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10602 }
10603 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10604 gCamCapability[cameraId]->active_array_size.width *
10605 gCamCapability[cameraId]->active_array_size.height * max_fps /
10606 gCamCapability[cameraId]->max_pixel_bandwidth;
10607 info->resource_cost = 100 * MIN(1.0, ratio);
10608 LOGI("camera %d resource cost is %d", cameraId,
10609 info->resource_cost);
10610
10611 pthread_mutex_unlock(&gCamLock);
10612 return rc;
10613}
10614
10615/*===========================================================================
10616 * FUNCTION : translateCapabilityToMetadata
10617 *
10618 * DESCRIPTION: translate the capability into camera_metadata_t
10619 *
10620 * PARAMETERS : type of the request
10621 *
10622 *
10623 * RETURN : success: camera_metadata_t*
10624 * failure: NULL
10625 *
10626 *==========================================================================*/
10627camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10628{
10629 if (mDefaultMetadata[type] != NULL) {
10630 return mDefaultMetadata[type];
10631 }
10632 //first time we are handling this request
10633 //fill up the metadata structure using the wrapper class
10634 CameraMetadata settings;
10635 //translate from cam_capability_t to camera_metadata_tag_t
10636 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10637 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10638 int32_t defaultRequestID = 0;
10639 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10640
10641 /* OIS disable */
10642 char ois_prop[PROPERTY_VALUE_MAX];
10643 memset(ois_prop, 0, sizeof(ois_prop));
10644 property_get("persist.camera.ois.disable", ois_prop, "0");
10645 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10646
10647 /* Force video to use OIS */
10648 char videoOisProp[PROPERTY_VALUE_MAX];
10649 memset(videoOisProp, 0, sizeof(videoOisProp));
10650 property_get("persist.camera.ois.video", videoOisProp, "1");
10651 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010652
10653 // Hybrid AE enable/disable
10654 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10655 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10656 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10657 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10658
Thierry Strudel3d639192016-09-09 11:52:26 -070010659 uint8_t controlIntent = 0;
10660 uint8_t focusMode;
10661 uint8_t vsMode;
10662 uint8_t optStabMode;
10663 uint8_t cacMode;
10664 uint8_t edge_mode;
10665 uint8_t noise_red_mode;
10666 uint8_t tonemap_mode;
10667 bool highQualityModeEntryAvailable = FALSE;
10668 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010669 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010670 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10671 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010672 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010673
Thierry Strudel3d639192016-09-09 11:52:26 -070010674 switch (type) {
10675 case CAMERA3_TEMPLATE_PREVIEW:
10676 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10677 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10678 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10679 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10680 edge_mode = ANDROID_EDGE_MODE_FAST;
10681 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10682 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10683 break;
10684 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10685 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10686 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10687 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10688 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10689 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10690 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10691 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10692 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10693 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10694 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10695 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10696 highQualityModeEntryAvailable = TRUE;
10697 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10698 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10699 fastModeEntryAvailable = TRUE;
10700 }
10701 }
10702 if (highQualityModeEntryAvailable) {
10703 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10704 } else if (fastModeEntryAvailable) {
10705 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10706 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010707 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10708 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10709 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010710 break;
10711 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10712 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10713 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10714 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010715 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10716 edge_mode = ANDROID_EDGE_MODE_FAST;
10717 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10718 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10719 if (forceVideoOis)
10720 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10721 break;
10722 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10723 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10724 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10725 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010726 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10727 edge_mode = ANDROID_EDGE_MODE_FAST;
10728 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10729 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10730 if (forceVideoOis)
10731 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10732 break;
10733 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10734 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10735 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10736 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10737 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10738 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10739 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10740 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10741 break;
10742 case CAMERA3_TEMPLATE_MANUAL:
10743 edge_mode = ANDROID_EDGE_MODE_FAST;
10744 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10745 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10746 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10747 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10748 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10749 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10750 break;
10751 default:
10752 edge_mode = ANDROID_EDGE_MODE_FAST;
10753 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10754 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10755 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10756 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10757 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10759 break;
10760 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010761    // Set CAC to OFF if the underlying device doesn't support it
10762 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10763 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10764 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010765 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10766 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10767 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10768 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10769 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10770 }
10771 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010772 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010773
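    // OIS default: if the sensor advertises exactly one optical stabilization mode,
    // honor it; the persist.camera.ois.disable property read above can additionally
    // force OIS off.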
10774 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10775 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10776 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10777 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10778 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10779 || ois_disable)
10780 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10781 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010782 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010783
10784 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10785 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10786
10787 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10788 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10789
10790 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10791 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10792
10793 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10794 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10795
10796 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10797 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10798
10799 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10800 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10801
10802 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10803 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10804
10805 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10806 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10807
10808 /*flash*/
10809 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10810 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10811
10812 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10813 settings.update(ANDROID_FLASH_FIRING_POWER,
10814 &flashFiringLevel, 1);
10815
10816 /* lens */
10817 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10818 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10819
10820 if (gCamCapability[mCameraId]->filter_densities_count) {
10821 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10822 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10823 gCamCapability[mCameraId]->filter_densities_count);
10824 }
10825
10826 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10827 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10828
Thierry Strudel3d639192016-09-09 11:52:26 -070010829 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10830 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10831
10832 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10833 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10834
10835 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10836 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10837
10838 /* face detection (default to OFF) */
10839 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10840 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10841
Thierry Strudel54dc9782017-02-15 12:12:10 -080010842 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10843 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010844
10845 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10846 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10847
10848 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10849 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10850
Thierry Strudel3d639192016-09-09 11:52:26 -070010851
10852 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10853 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10854
10855 /* Exposure time(Update the Min Exposure Time)*/
10856 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10857 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10858
10859 /* frame duration */
10860 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10861 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10862
10863 /* sensitivity */
10864 static const int32_t default_sensitivity = 100;
10865 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010866#ifndef USE_HAL_3_3
10867 static const int32_t default_isp_sensitivity =
10868 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10869 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10870#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010871
10872 /*edge mode*/
10873 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10874
10875 /*noise reduction mode*/
10876 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10877
10878 /*color correction mode*/
10879 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10880 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10881
10882 /*transform matrix mode*/
10883 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10884
10885 int32_t scaler_crop_region[4];
10886 scaler_crop_region[0] = 0;
10887 scaler_crop_region[1] = 0;
10888 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10889 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10890 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10891
10892 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10893 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10894
10895 /*focus distance*/
10896 float focus_distance = 0.0;
10897 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10898
10899 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010900 /* Restrict template max_fps to 30 */
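    /* Illustrative example (assumed fps table): with ranges {[15,30], [30,30],
     * [7.5,30], [60,120]}, the [60,120] entry is skipped because its max exceeds
     * TEMPLATE_MAX_PREVIEW_FPS; preview/still/ZSL templates pick [7.5,30] (the
     * widest span) while the remaining templates pick [30,30] (the highest
     * fixed-fps range). */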
Thierry Strudel3d639192016-09-09 11:52:26 -070010901 float max_range = 0.0;
10902 float max_fixed_fps = 0.0;
10903 int32_t fps_range[2] = {0, 0};
10904 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10905 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010906 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10907 TEMPLATE_MAX_PREVIEW_FPS) {
10908 continue;
10909 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10911 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10912 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10913 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10914 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10915 if (range > max_range) {
10916 fps_range[0] =
10917 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10918 fps_range[1] =
10919 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10920 max_range = range;
10921 }
10922 } else {
10923 if (range < 0.01 && max_fixed_fps <
10924 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10925 fps_range[0] =
10926 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10927 fps_range[1] =
10928 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10929 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10930 }
10931 }
10932 }
10933 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10934
10935 /*precapture trigger*/
10936 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10937 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10938
10939 /*af trigger*/
10940 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10941 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10942
10943 /* ae & af regions */
10944 int32_t active_region[] = {
10945 gCamCapability[mCameraId]->active_array_size.left,
10946 gCamCapability[mCameraId]->active_array_size.top,
10947 gCamCapability[mCameraId]->active_array_size.left +
10948 gCamCapability[mCameraId]->active_array_size.width,
10949 gCamCapability[mCameraId]->active_array_size.top +
10950 gCamCapability[mCameraId]->active_array_size.height,
10951 0};
10952 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10953 sizeof(active_region) / sizeof(active_region[0]));
10954 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10955 sizeof(active_region) / sizeof(active_region[0]));
10956
10957 /* black level lock */
10958 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10959 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10960
Thierry Strudel3d639192016-09-09 11:52:26 -070010961 //special defaults for manual template
10962 if (type == CAMERA3_TEMPLATE_MANUAL) {
10963 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10964 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10965
10966 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10967 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10968
10969 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10970 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10971
10972 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10973 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10974
10975 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10976 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10977
10978 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10979 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10980 }
10981
10982
10983     /* TNR
10984      * This is where we decide for which templates TNR will be enabled.
10985      * TNR is turned on if either the preview or the video stream requires it.
10986      * This is not to be confused with per-stream linking; that decision is
10987      * still made per session and is handled as part of stream configuration.
10988 */
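    // For example, when only m_bTnrVideo is set, the switch below enables TNR solely
    // for the VIDEO_RECORD template; every other template leaves tnr_enable at 0.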
10989 uint8_t tnr_enable = 0;
10990
10991 if (m_bTnrPreview || m_bTnrVideo) {
10992
10993 switch (type) {
10994 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10995 tnr_enable = 1;
10996 break;
10997
10998 default:
10999 tnr_enable = 0;
11000 break;
11001 }
11002
11003 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11004 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11005 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11006
11007 LOGD("TNR:%d with process plate %d for template:%d",
11008 tnr_enable, tnr_process_type, type);
11009 }
11010
11011 //Update Link tags to default
11012 int32_t sync_type = CAM_TYPE_STANDALONE;
11013 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11014
11015     int32_t is_main = 0; // this does not matter, as the app is expected to overwrite it
11016 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11017
11018 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11019
11020 /* CDS default */
11021 char prop[PROPERTY_VALUE_MAX];
11022 memset(prop, 0, sizeof(prop));
11023 property_get("persist.camera.CDS", prop, "Auto");
11024 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11025 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11026 if (CAM_CDS_MODE_MAX == cds_mode) {
11027 cds_mode = CAM_CDS_MODE_AUTO;
11028 }
11029
11030 /* Disabling CDS in templates which have TNR enabled*/
11031 if (tnr_enable)
11032 cds_mode = CAM_CDS_MODE_OFF;
11033
11034 int32_t mode = cds_mode;
11035 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011036
Thierry Strudel269c81a2016-10-12 12:13:59 -070011037 /* Manual Convergence AEC Speed is disabled by default*/
11038 float default_aec_speed = 0;
11039 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11040
11041 /* Manual Convergence AWB Speed is disabled by default*/
11042 float default_awb_speed = 0;
11043 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11044
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011045 // Set instant AEC to normal convergence by default
11046 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11047 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11048
Shuzhen Wang19463d72016-03-08 11:09:52 -080011049 /* hybrid ae */
11050 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11051
Thierry Strudel3d639192016-09-09 11:52:26 -070011052 mDefaultMetadata[type] = settings.release();
11053
11054 return mDefaultMetadata[type];
11055}
11056
11057/*===========================================================================
11058 * FUNCTION : setFrameParameters
11059 *
11060 * DESCRIPTION: set parameters per frame as requested in the metadata from
11061 * framework
11062 *
11063 * PARAMETERS :
11064 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011065 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011066 * @blob_request: Whether this request is a blob request or not
11067 *
11068 * RETURN : success: NO_ERROR
11069 * failure:
11070 *==========================================================================*/
11071int QCamera3HardwareInterface::setFrameParameters(
11072 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011073 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011074 int blob_request,
11075 uint32_t snapshotStreamId)
11076{
11077 /*translate from camera_metadata_t type to parm_type_t*/
11078 int rc = 0;
11079 int32_t hal_version = CAM_HAL_V3;
11080
11081 clear_metadata_buffer(mParameters);
11082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11083 LOGE("Failed to set hal version in the parameters");
11084 return BAD_VALUE;
11085 }
11086
11087 /*we need to update the frame number in the parameters*/
11088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11089 request->frame_number)) {
11090 LOGE("Failed to set the frame number in the parameters");
11091 return BAD_VALUE;
11092 }
11093
11094 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011096 LOGE("Failed to set stream type mask in the parameters");
11097 return BAD_VALUE;
11098 }
11099
11100 if (mUpdateDebugLevel) {
11101 uint32_t dummyDebugLevel = 0;
11102         /* The value of dummyDebugLevel is irrelevant. On
11103          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11105 dummyDebugLevel)) {
11106 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11107 return BAD_VALUE;
11108 }
11109 mUpdateDebugLevel = false;
11110 }
11111
11112 if(request->settings != NULL){
11113 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11114 if (blob_request)
11115 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11116 }
11117
11118 return rc;
11119}
11120
11121/*===========================================================================
11122 * FUNCTION : setReprocParameters
11123 *
11124 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11125 * return it.
11126 *
11127 * PARAMETERS :
11128 * @request : request that needs to be serviced
11129 *
11130 * RETURN : success: NO_ERROR
11131 * failure:
11132 *==========================================================================*/
11133int32_t QCamera3HardwareInterface::setReprocParameters(
11134 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11135 uint32_t snapshotStreamId)
11136{
11137 /*translate from camera_metadata_t type to parm_type_t*/
11138 int rc = 0;
11139
11140 if (NULL == request->settings){
11141 LOGE("Reprocess settings cannot be NULL");
11142 return BAD_VALUE;
11143 }
11144
11145 if (NULL == reprocParam) {
11146 LOGE("Invalid reprocessing metadata buffer");
11147 return BAD_VALUE;
11148 }
11149 clear_metadata_buffer(reprocParam);
11150
11151 /*we need to update the frame number in the parameters*/
11152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11153 request->frame_number)) {
11154 LOGE("Failed to set the frame number in the parameters");
11155 return BAD_VALUE;
11156 }
11157
11158 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11159 if (rc < 0) {
11160 LOGE("Failed to translate reproc request");
11161 return rc;
11162 }
11163
11164 CameraMetadata frame_settings;
11165 frame_settings = request->settings;
11166 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11167 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11168 int32_t *crop_count =
11169 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11170 int32_t *crop_data =
11171 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11172 int32_t *roi_map =
11173 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11174 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11175 cam_crop_data_t crop_meta;
11176 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11177 crop_meta.num_of_streams = 1;
11178 crop_meta.crop_info[0].crop.left = crop_data[0];
11179 crop_meta.crop_info[0].crop.top = crop_data[1];
11180 crop_meta.crop_info[0].crop.width = crop_data[2];
11181 crop_meta.crop_info[0].crop.height = crop_data[3];
11182
11183 crop_meta.crop_info[0].roi_map.left =
11184 roi_map[0];
11185 crop_meta.crop_info[0].roi_map.top =
11186 roi_map[1];
11187 crop_meta.crop_info[0].roi_map.width =
11188 roi_map[2];
11189 crop_meta.crop_info[0].roi_map.height =
11190 roi_map[3];
11191
11192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11193 rc = BAD_VALUE;
11194 }
11195 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11196 request->input_buffer->stream,
11197 crop_meta.crop_info[0].crop.left,
11198 crop_meta.crop_info[0].crop.top,
11199 crop_meta.crop_info[0].crop.width,
11200 crop_meta.crop_info[0].crop.height);
11201 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11202 request->input_buffer->stream,
11203 crop_meta.crop_info[0].roi_map.left,
11204 crop_meta.crop_info[0].roi_map.top,
11205 crop_meta.crop_info[0].roi_map.width,
11206 crop_meta.crop_info[0].roi_map.height);
11207 } else {
11208 LOGE("Invalid reprocess crop count %d!", *crop_count);
11209 }
11210 } else {
11211 LOGE("No crop data from matching output stream");
11212 }
11213
11214 /* These settings are not needed for regular requests so handle them specially for
11215 reprocess requests; information needed for EXIF tags */
11216 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11217 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11218 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11219 if (NAME_NOT_FOUND != val) {
11220 uint32_t flashMode = (uint32_t)val;
11221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11222 rc = BAD_VALUE;
11223 }
11224 } else {
11225 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11226 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11227 }
11228 } else {
11229 LOGH("No flash mode in reprocess settings");
11230 }
11231
11232 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11233 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11235 rc = BAD_VALUE;
11236 }
11237 } else {
11238 LOGH("No flash state in reprocess settings");
11239 }
11240
11241 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11242 uint8_t *reprocessFlags =
11243 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11245 *reprocessFlags)) {
11246 rc = BAD_VALUE;
11247 }
11248 }
11249
Thierry Strudel54dc9782017-02-15 12:12:10 -080011250 // Add exif debug data to internal metadata
11251 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11252 mm_jpeg_debug_exif_params_t *debug_params =
11253 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11254 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11255 // AE
11256 if (debug_params->ae_debug_params_valid == TRUE) {
11257 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11258 debug_params->ae_debug_params);
11259 }
11260 // AWB
11261 if (debug_params->awb_debug_params_valid == TRUE) {
11262 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11263 debug_params->awb_debug_params);
11264 }
11265 // AF
11266 if (debug_params->af_debug_params_valid == TRUE) {
11267 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11268 debug_params->af_debug_params);
11269 }
11270 // ASD
11271 if (debug_params->asd_debug_params_valid == TRUE) {
11272 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11273 debug_params->asd_debug_params);
11274 }
11275 // Stats
11276 if (debug_params->stats_debug_params_valid == TRUE) {
11277 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11278 debug_params->stats_debug_params);
11279 }
11280 // BE Stats
11281 if (debug_params->bestats_debug_params_valid == TRUE) {
11282 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11283 debug_params->bestats_debug_params);
11284 }
11285 // BHIST
11286 if (debug_params->bhist_debug_params_valid == TRUE) {
11287 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11288 debug_params->bhist_debug_params);
11289 }
11290 // 3A Tuning
11291 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11292 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11293 debug_params->q3a_tuning_debug_params);
11294 }
11295 }
11296
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011297 // Add metadata which reprocess needs
11298 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11299 cam_reprocess_info_t *repro_info =
11300 (cam_reprocess_info_t *)frame_settings.find
11301 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011302 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011303 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011304 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011305 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011306 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011307 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011308 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011309 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011310 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011311 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011312 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011313 repro_info->pipeline_flip);
11314 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11315 repro_info->af_roi);
11316 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11317 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011318        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11319            CAM_INTF_PARM_ROTATION metadata has already been added in
11320            translateToHalMetadata and the HAL needs to keep this new rotation
11321            metadata. Otherwise, the old rotation info saved in the vendor tag
11322            is used. */
11323 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11324 CAM_INTF_PARM_ROTATION, reprocParam) {
11325 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11326 } else {
11327 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011328 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011329 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 }
11331
11332    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11333       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11334       roi.width and roi.height are the final JPEG size.
11335       For now, the HAL only checks this for reprocess requests. */
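    /* Illustrative example (values are made up): QCAMERA3_JPEG_ENCODE_CROP_RECT =
       {100, 100, 2000, 1500} with QCAMERA3_JPEG_ENCODE_CROP_ROI = {0, 0, 1024, 768}
       asks the HW encoder to take the 2000x1500 crop at (100, 100) and scale it to a
       1024x768 JPEG. */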
11336 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11337 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11338 uint8_t *enable =
11339 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11340 if (*enable == TRUE) {
11341 int32_t *crop_data =
11342 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11343 cam_stream_crop_info_t crop_meta;
11344 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11345 crop_meta.stream_id = 0;
11346 crop_meta.crop.left = crop_data[0];
11347 crop_meta.crop.top = crop_data[1];
11348 crop_meta.crop.width = crop_data[2];
11349 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011350 // The JPEG crop roi should match cpp output size
11351 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11352 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11353 crop_meta.roi_map.left = 0;
11354 crop_meta.roi_map.top = 0;
11355 crop_meta.roi_map.width = cpp_crop->crop.width;
11356 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011357 }
11358 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11359 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011360 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011361 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011362 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11363 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011364 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011365 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11366
11367 // Add JPEG scale information
11368 cam_dimension_t scale_dim;
11369 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11370 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11371 int32_t *roi =
11372 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11373 scale_dim.width = roi[2];
11374 scale_dim.height = roi[3];
11375 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11376 scale_dim);
11377 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11378 scale_dim.width, scale_dim.height, mCameraId);
11379 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011380 }
11381 }
11382
11383 return rc;
11384}
11385
11386/*===========================================================================
11387 * FUNCTION : saveRequestSettings
11388 *
11389 * DESCRIPTION: Add any settings that might have changed to the request settings
11390 * and save the settings to be applied on the frame
11391 *
11392 * PARAMETERS :
11393 * @jpegMetadata : the extracted and/or modified jpeg metadata
11394 * @request : request with initial settings
11395 *
11396 * RETURN :
11397 * camera_metadata_t* : pointer to the saved request settings
11398 *==========================================================================*/
11399camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11400 const CameraMetadata &jpegMetadata,
11401 camera3_capture_request_t *request)
11402{
11403 camera_metadata_t *resultMetadata;
11404 CameraMetadata camMetadata;
11405 camMetadata = request->settings;
11406
11407 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11408 int32_t thumbnail_size[2];
11409 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11410 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11411 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11412 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11413 }
11414
11415 if (request->input_buffer != NULL) {
11416 uint8_t reprocessFlags = 1;
11417 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11418 (uint8_t*)&reprocessFlags,
11419 sizeof(reprocessFlags));
11420 }
11421
11422 resultMetadata = camMetadata.release();
11423 return resultMetadata;
11424}
11425
11426/*===========================================================================
11427 * FUNCTION : setHalFpsRange
11428 *
11429 * DESCRIPTION: set FPS range parameter
11430 *
11431 *
11432 * PARAMETERS :
11433 * @settings : Metadata from framework
11434 * @hal_metadata: Metadata buffer
11435 *
11436 *
11437 * RETURN : success: NO_ERROR
11438 * failure:
11439 *==========================================================================*/
11440int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11441 metadata_buffer_t *hal_metadata)
11442{
11443 int32_t rc = NO_ERROR;
11444 cam_fps_range_t fps_range;
11445 fps_range.min_fps = (float)
11446 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11447 fps_range.max_fps = (float)
11448 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11449 fps_range.video_min_fps = fps_range.min_fps;
11450 fps_range.video_max_fps = fps_range.max_fps;
11451
11452 LOGD("aeTargetFpsRange fps: [%f %f]",
11453 fps_range.min_fps, fps_range.max_fps);
11454 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11455 * follows:
11456 * ---------------------------------------------------------------|
11457 * Video stream is absent in configure_streams |
11458 * (Camcorder preview before the first video record |
11459 * ---------------------------------------------------------------|
11460 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11461 * | | | vid_min/max_fps|
11462 * ---------------------------------------------------------------|
11463 * NO | [ 30, 240] | 240 | [240, 240] |
11464 * |-------------|-------------|----------------|
11465 * | [240, 240] | 240 | [240, 240] |
11466 * ---------------------------------------------------------------|
11467 * Video stream is present in configure_streams |
11468 * ---------------------------------------------------------------|
11469 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11470 * | | | vid_min/max_fps|
11471 * ---------------------------------------------------------------|
11472 * NO | [ 30, 240] | 240 | [240, 240] |
11473 * (camcorder prev |-------------|-------------|----------------|
11474 * after video rec | [240, 240] | 240 | [240, 240] |
11475 * is stopped) | | | |
11476 * ---------------------------------------------------------------|
11477 * YES | [ 30, 240] | 240 | [240, 240] |
11478 * |-------------|-------------|----------------|
11479 * | [240, 240] | 240 | [240, 240] |
11480 * ---------------------------------------------------------------|
11481 * When Video stream is absent in configure_streams,
11482 * preview fps = sensor_fps / batchsize
11483 * Eg: for 240fps at batchSize 4, preview = 60fps
11484 * for 120fps at batchSize 4, preview = 30fps
11485 *
11486 * When video stream is present in configure_streams, preview fps is as per
11487 * the ratio of preview buffers to video buffers requested in process
11488 * capture request
11489 */
11490 mBatchSize = 0;
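    // In constrained high-speed mode the sensor runs at a fixed high rate, so the
    // minimum fps is locked to the maximum below and the HFR batch size is derived
    // from that rate.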
11491 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11492 fps_range.min_fps = fps_range.video_max_fps;
11493 fps_range.video_min_fps = fps_range.video_max_fps;
11494 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11495 fps_range.max_fps);
11496 if (NAME_NOT_FOUND != val) {
11497 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11499 return BAD_VALUE;
11500 }
11501
11502 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11503 /* If batchmode is currently in progress and the fps changes,
11504 * set the flag to restart the sensor */
11505 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11506 (mHFRVideoFps != fps_range.max_fps)) {
11507 mNeedSensorRestart = true;
11508 }
11509 mHFRVideoFps = fps_range.max_fps;
11510 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11511 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11512 mBatchSize = MAX_HFR_BATCH_SIZE;
11513 }
11514 }
11515 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11516
11517 }
11518 } else {
11519         /* HFR mode is a session parameter in the backend/ISP. It should be reset
11520          * when not in HFR mode */
11521 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11523 return BAD_VALUE;
11524 }
11525 }
11526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11527 return BAD_VALUE;
11528 }
11529 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11530 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11531 return rc;
11532}
11533
11534/*===========================================================================
11535 * FUNCTION : translateToHalMetadata
11536 *
11537 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11538 *
11539 *
11540 * PARAMETERS :
11541 * @request : request sent from framework
11542 *
11543 *
11544 * RETURN : success: NO_ERROR
11545 * failure:
11546 *==========================================================================*/
11547int QCamera3HardwareInterface::translateToHalMetadata
11548 (const camera3_capture_request_t *request,
11549 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011550 uint32_t snapshotStreamId) {
11551 if (request == nullptr || hal_metadata == nullptr) {
11552 return BAD_VALUE;
11553 }
11554
11555 int64_t minFrameDuration = getMinFrameDuration(request);
11556
11557 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11558 minFrameDuration);
11559}
11560
11561int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11562 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11563 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11564
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 int rc = 0;
11566 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011567 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011568
11569 /* Do not change the order of the following list unless you know what you are
11570 * doing.
11571 * The order is laid out in such a way that parameters in the front of the table
11572 * may be used to override the parameters later in the table. Examples are:
11573 * 1. META_MODE should precede AEC/AWB/AF MODE
11574      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11575 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11576      * 4. Any mode should precede its corresponding settings
11577 */
11578 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11579 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11581 rc = BAD_VALUE;
11582 }
11583 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11584 if (rc != NO_ERROR) {
11585 LOGE("extractSceneMode failed");
11586 }
11587 }
11588
11589 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11590 uint8_t fwk_aeMode =
11591 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11592 uint8_t aeMode;
11593 int32_t redeye;
11594
11595 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11596 aeMode = CAM_AE_MODE_OFF;
11597 } else {
11598 aeMode = CAM_AE_MODE_ON;
11599 }
11600 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11601 redeye = 1;
11602 } else {
11603 redeye = 0;
11604 }
11605
11606 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11607 fwk_aeMode);
11608 if (NAME_NOT_FOUND != val) {
11609 int32_t flashMode = (int32_t)val;
11610 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11611 }
11612
11613 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11615 rc = BAD_VALUE;
11616 }
11617 }
11618
11619 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11620 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11621 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11622 fwk_whiteLevel);
11623 if (NAME_NOT_FOUND != val) {
11624 uint8_t whiteLevel = (uint8_t)val;
11625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11626 rc = BAD_VALUE;
11627 }
11628 }
11629 }
11630
11631 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11632 uint8_t fwk_cacMode =
11633 frame_settings.find(
11634 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11635 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11636 fwk_cacMode);
11637 if (NAME_NOT_FOUND != val) {
11638 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11639 bool entryAvailable = FALSE;
11640 // Check whether Frameworks set CAC mode is supported in device or not
11641 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11642 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11643 entryAvailable = TRUE;
11644 break;
11645 }
11646 }
11647 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11648             // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
11649             // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
11650             // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11651 if (entryAvailable == FALSE) {
11652 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11653 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11654 } else {
11655 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11656                     // High is not supported, so set FAST since the spec says the underlying
11657                     // device implementation can be the same for both modes.
11658 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11659 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11660                     // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
11661                     // to avoid the fps drop that high quality would cause
11662 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11663 } else {
11664 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11665 }
11666 }
11667 }
11668 LOGD("Final cacMode is %d", cacMode);
11669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11670 rc = BAD_VALUE;
11671 }
11672 } else {
11673 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11674 }
11675 }
11676
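    // Debug override: when persist.camera.af.infinity is non-zero, focus is forced
    // to infinity and the framework AF mode is ignored.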
Thierry Strudel2896d122017-02-23 19:18:03 -080011677 char af_value[PROPERTY_VALUE_MAX];
11678 property_get("persist.camera.af.infinity", af_value, "0");
11679
Jason Lee84ae9972017-02-24 13:24:24 -080011680 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011681 if (atoi(af_value) == 0) {
11682 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011683 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011684 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11685 fwk_focusMode);
11686 if (NAME_NOT_FOUND != val) {
11687 uint8_t focusMode = (uint8_t)val;
11688 LOGD("set focus mode %d", focusMode);
11689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11690 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11691 rc = BAD_VALUE;
11692 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011693 }
11694 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011695 } else {
11696 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11697 LOGE("Focus forced to infinity %d", focusMode);
11698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11699 rc = BAD_VALUE;
11700 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011701 }
11702
Jason Lee84ae9972017-02-24 13:24:24 -080011703 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11704 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011705 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11707 focalDistance)) {
11708 rc = BAD_VALUE;
11709 }
11710 }
11711
11712 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11713 uint8_t fwk_antibandingMode =
11714 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11715 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11716 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11717 if (NAME_NOT_FOUND != val) {
11718 uint32_t hal_antibandingMode = (uint32_t)val;
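            // Antibanding AUTO is specialized to the local mains frequency: 60 Hz
            // zones map to AUTO_60HZ, everything else to AUTO_50HZ.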
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011719 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11720 if (m60HzZone) {
11721 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11722 } else {
11723 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11724 }
11725 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11727 hal_antibandingMode)) {
11728 rc = BAD_VALUE;
11729 }
11730 }
11731 }
11732
11733 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11734 int32_t expCompensation = frame_settings.find(
11735 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11736 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11737 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11738 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11739 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011740 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11742 expCompensation)) {
11743 rc = BAD_VALUE;
11744 }
11745 }
11746
11747 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11748 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11750 rc = BAD_VALUE;
11751 }
11752 }
11753 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11754 rc = setHalFpsRange(frame_settings, hal_metadata);
11755 if (rc != NO_ERROR) {
11756 LOGE("setHalFpsRange failed");
11757 }
11758 }
11759
11760 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11761 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11763 rc = BAD_VALUE;
11764 }
11765 }
11766
11767 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11768 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11769 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11770 fwk_effectMode);
11771 if (NAME_NOT_FOUND != val) {
11772 uint8_t effectMode = (uint8_t)val;
11773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11774 rc = BAD_VALUE;
11775 }
11776 }
11777 }
11778
11779 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11780 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11782 colorCorrectMode)) {
11783 rc = BAD_VALUE;
11784 }
11785 }
11786
11787 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11788 cam_color_correct_gains_t colorCorrectGains;
11789 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11790 colorCorrectGains.gains[i] =
11791 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11792 }
11793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11794 colorCorrectGains)) {
11795 rc = BAD_VALUE;
11796 }
11797 }
11798
11799 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11800 cam_color_correct_matrix_t colorCorrectTransform;
11801 cam_rational_type_t transform_elem;
11802 size_t num = 0;
11803 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11804 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11805 transform_elem.numerator =
11806 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11807 transform_elem.denominator =
11808 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11809 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11810 num++;
11811 }
11812 }
11813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11814 colorCorrectTransform)) {
11815 rc = BAD_VALUE;
11816 }
11817 }
11818
11819 cam_trigger_t aecTrigger;
11820 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11821 aecTrigger.trigger_id = -1;
11822 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11823 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11824 aecTrigger.trigger =
11825 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11826 aecTrigger.trigger_id =
11827 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11829 aecTrigger)) {
11830 rc = BAD_VALUE;
11831 }
11832 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11833 aecTrigger.trigger, aecTrigger.trigger_id);
11834 }
11835
11836 /*af_trigger must come with a trigger id*/
11837 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11838 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11839 cam_trigger_t af_trigger;
11840 af_trigger.trigger =
11841 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11842 af_trigger.trigger_id =
11843 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11844 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11845 rc = BAD_VALUE;
11846 }
11847 LOGD("AfTrigger: %d AfTriggerID: %d",
11848 af_trigger.trigger, af_trigger.trigger_id);
11849 }
11850
11851 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11852 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11854 rc = BAD_VALUE;
11855 }
11856 }
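    // Edge mode: sharpness is forced to 0 when the mode is OFF; otherwise it starts at the
    // capability default and may be overridden by the QCAMERA3_SHARPNESS_STRENGTH vendor tag
    // when the requested value lies within the supported range.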
11857 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11858 cam_edge_application_t edge_application;
11859 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011860
Thierry Strudel3d639192016-09-09 11:52:26 -070011861 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11862 edge_application.sharpness = 0;
11863 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011864 edge_application.sharpness =
11865 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11866 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11867 int32_t sharpness =
11868 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11869 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11870 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11871 LOGD("Setting edge mode sharpness %d", sharpness);
11872 edge_application.sharpness = sharpness;
11873 }
11874 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011875 }
11876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11877 rc = BAD_VALUE;
11878 }
11879 }
11880
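    // android.flash.mode is honored only when the AE mode does not already control the flash
    // (AE mode OFF or ON); for the ON_AUTO_FLASH family the flash mode request is ignored.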
11881 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11882 int32_t respectFlashMode = 1;
11883 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11884 uint8_t fwk_aeMode =
11885 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11886 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11887 respectFlashMode = 0;
11888 LOGH("AE Mode controls flash, ignore android.flash.mode");
11889 }
11890 }
11891 if (respectFlashMode) {
11892 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11893 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11894 LOGH("flash mode after mapping %d", val);
11895 // To check: CAM_INTF_META_FLASH_MODE usage
11896 if (NAME_NOT_FOUND != val) {
11897 uint8_t flashMode = (uint8_t)val;
11898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11899 rc = BAD_VALUE;
11900 }
11901 }
11902 }
11903 }
11904
11905 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11906 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11908 rc = BAD_VALUE;
11909 }
11910 }
11911
11912 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11913 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11915 flashFiringTime)) {
11916 rc = BAD_VALUE;
11917 }
11918 }
11919
11920 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11921 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11923 hotPixelMode)) {
11924 rc = BAD_VALUE;
11925 }
11926 }
11927
11928 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11929 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11931 lensAperture)) {
11932 rc = BAD_VALUE;
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11937 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11939 filterDensity)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11945 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11947 focalLength)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11953 uint8_t optStabMode =
11954 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11955 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11956 optStabMode)) {
11957 rc = BAD_VALUE;
11958 }
11959 }
11960
11961 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11962 uint8_t videoStabMode =
11963 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11964 LOGD("videoStabMode from APP = %d", videoStabMode);
11965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11966 videoStabMode)) {
11967 rc = BAD_VALUE;
11968 }
11969 }
11970
11971
11972 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11973 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11975 noiseRedMode)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979
11980 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11981 float reprocessEffectiveExposureFactor =
11982 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11984 reprocessEffectiveExposureFactor)) {
11985 rc = BAD_VALUE;
11986 }
11987 }
11988
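    // The crop region arrives in active-array coordinates and is remapped to sensor output
    // coordinates; the mapped region is also consulted below when validating the AE/AF ROIs.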
11989 cam_crop_region_t scalerCropRegion;
11990 bool scalerCropSet = false;
11991 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11992 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11993 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11994 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11995 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11996
11997 // Map coordinate system from active array to sensor output.
11998 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11999 scalerCropRegion.width, scalerCropRegion.height);
12000
12001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12002 scalerCropRegion)) {
12003 rc = BAD_VALUE;
12004 }
12005 scalerCropSet = true;
12006 }
12007
12008 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12009 int64_t sensorExpTime =
12010 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12011 LOGD("setting sensorExpTime %lld", sensorExpTime);
12012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12013 sensorExpTime)) {
12014 rc = BAD_VALUE;
12015 }
12016 }
12017
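    // Clamp the requested frame duration between the minimum computed for this request and
    // the sensor's maximum supported frame duration.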
12018 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12019 int64_t sensorFrameDuration =
12020 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012021 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12022 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12023 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12024 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12026 sensorFrameDuration)) {
12027 rc = BAD_VALUE;
12028 }
12029 }
12030
12031 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12032 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12033 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12034 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12035 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12036 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12037 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12039 sensorSensitivity)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043
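    // Post-RAW sensitivity boost (compiled out for HAL 3.3): clamp the requested value to the
    // advertised ISP sensitivity range before applying it.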
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012044#ifndef USE_HAL_3_3
12045 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12046 int32_t ispSensitivity =
12047 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12048 if (ispSensitivity <
12049 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12050 ispSensitivity =
12051 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12052 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12053 }
12054 if (ispSensitivity >
12055 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12056 ispSensitivity =
12057 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12058 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12059 }
12060 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12061 ispSensitivity)) {
12062 rc = BAD_VALUE;
12063 }
12064 }
12065#endif
12066
Thierry Strudel3d639192016-09-09 11:52:26 -070012067 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12068 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12070 rc = BAD_VALUE;
12071 }
12072 }
12073
12074 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12075 uint8_t fwk_facedetectMode =
12076 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12077
12078 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12079 fwk_facedetectMode);
12080
12081 if (NAME_NOT_FOUND != val) {
12082 uint8_t facedetectMode = (uint8_t)val;
12083 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12084 facedetectMode)) {
12085 rc = BAD_VALUE;
12086 }
12087 }
12088 }
12089
Thierry Strudel54dc9782017-02-15 12:12:10 -080012090 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012091 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012092 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12094 histogramMode)) {
12095 rc = BAD_VALUE;
12096 }
12097 }
12098
12099 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12100 uint8_t sharpnessMapMode =
12101 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12103 sharpnessMapMode)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107
12108 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12109 uint8_t tonemapMode =
12110 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12112 rc = BAD_VALUE;
12113 }
12114 }
12115 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12116 /*All tonemap channels will have the same number of points*/
12117 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12118 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12119 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12120 cam_rgb_tonemap_curves tonemapCurves;
12121 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12122 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12123 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12124 tonemapCurves.tonemap_points_cnt,
12125 CAM_MAX_TONEMAP_CURVE_SIZE);
12126 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12127 }
12128
12129 /* ch0 = G*/
12130 size_t point = 0;
12131 cam_tonemap_curve_t tonemapCurveGreen;
12132 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12133 for (size_t j = 0; j < 2; j++) {
12134 tonemapCurveGreen.tonemap_points[i][j] =
12135 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12136 point++;
12137 }
12138 }
12139 tonemapCurves.curves[0] = tonemapCurveGreen;
12140
12141 /* ch 1 = B */
12142 point = 0;
12143 cam_tonemap_curve_t tonemapCurveBlue;
12144 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12145 for (size_t j = 0; j < 2; j++) {
12146 tonemapCurveBlue.tonemap_points[i][j] =
12147 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12148 point++;
12149 }
12150 }
12151 tonemapCurves.curves[1] = tonemapCurveBlue;
12152
12153 /* ch 2 = R */
12154 point = 0;
12155 cam_tonemap_curve_t tonemapCurveRed;
12156 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12157 for (size_t j = 0; j < 2; j++) {
12158 tonemapCurveRed.tonemap_points[i][j] =
12159 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12160 point++;
12161 }
12162 }
12163 tonemapCurves.curves[2] = tonemapCurveRed;
12164
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12166 tonemapCurves)) {
12167 rc = BAD_VALUE;
12168 }
12169 }
12170
12171 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12172 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12173 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12174 captureIntent)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12180 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12182 blackLevelLock)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12188 uint8_t lensShadingMapMode =
12189 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12191 lensShadingMapMode)) {
12192 rc = BAD_VALUE;
12193 }
12194 }
12195
12196 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12197 cam_area_t roi;
12198 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012199 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012200
12201 // Map coordinate system from active array to sensor output.
12202 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12203 roi.rect.height);
12204
12205 if (scalerCropSet) {
12206 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12207 }
12208 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12209 rc = BAD_VALUE;
12210 }
12211 }
12212
12213 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12214 cam_area_t roi;
12215 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012216 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012217
12218 // Map coordinate system from active array to sensor output.
12219 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12220 roi.rect.height);
12221
12222 if (scalerCropSet) {
12223 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12224 }
12225 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 // CDS for non-HFR non-video mode
12231 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12232 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12233 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12234 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12235 LOGE("Invalid CDS mode %d!", *fwk_cds);
12236 } else {
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12238 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242 }
12243
Thierry Strudel04e026f2016-10-10 11:27:36 -070012244 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012245 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012246 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012247 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12248 }
12249 if (m_bVideoHdrEnabled)
12250 vhdr = CAM_VIDEO_HDR_MODE_ON;
12251
Thierry Strudel54dc9782017-02-15 12:12:10 -080012252 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12253
12254 if(vhdr != curr_hdr_state)
12255 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12256
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012257 rc = setVideoHdrMode(mParameters, vhdr);
12258 if (rc != NO_ERROR) {
12259 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012260 }
12261
12262 //IR
12263 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12264 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12265 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012266 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12267 uint8_t isIRon = 0;
12268
12269        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012270 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12271 LOGE("Invalid IR mode %d!", fwk_ir);
12272 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012273 if(isIRon != curr_ir_state )
12274 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12275
Thierry Strudel04e026f2016-10-10 11:27:36 -070012276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12277 CAM_INTF_META_IR_MODE, fwk_ir)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281 }
12282
Thierry Strudel54dc9782017-02-15 12:12:10 -080012283 //Binning Correction Mode
12284 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12285 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12286 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12287 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12288 || (0 > fwk_binning_correction)) {
12289 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12290 } else {
12291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12292 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12293 rc = BAD_VALUE;
12294 }
12295 }
12296 }
12297
Thierry Strudel269c81a2016-10-12 12:13:59 -070012298 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12299 float aec_speed;
12300 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12301 LOGD("AEC Speed :%f", aec_speed);
12302        if (aec_speed < 0) {
12303            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12304 } else {
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12306 aec_speed)) {
12307 rc = BAD_VALUE;
12308 }
12309 }
12310 }
12311
12312 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12313 float awb_speed;
12314 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12315 LOGD("AWB Speed :%f", awb_speed);
12316        if (awb_speed < 0) {
12317            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12318 } else {
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12320 awb_speed)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324 }
12325
Thierry Strudel3d639192016-09-09 11:52:26 -070012326 // TNR
12327 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12328 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12329 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012330 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012331 cam_denoise_param_t tnr;
12332 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12333 tnr.process_plates =
12334 (cam_denoise_process_type_t)frame_settings.find(
12335 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12336 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012337
12338 if(b_TnrRequested != curr_tnr_state)
12339 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12340
Thierry Strudel3d639192016-09-09 11:52:26 -070012341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345
Thierry Strudel54dc9782017-02-15 12:12:10 -080012346 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012347 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012348 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12350 *exposure_metering_mode)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
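    // Sensor test pattern: for SOLID_COLOR the per-channel values from the framework are
    // assigned to Gr/Gb according to the sensor's color filter arrangement.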
Thierry Strudel3d639192016-09-09 11:52:26 -070012355 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12356 int32_t fwk_testPatternMode =
12357 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12358 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12359 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12360
12361 if (NAME_NOT_FOUND != testPatternMode) {
12362 cam_test_pattern_data_t testPatternData;
12363 memset(&testPatternData, 0, sizeof(testPatternData));
12364 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12365 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12366 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12367 int32_t *fwk_testPatternData =
12368 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12369 testPatternData.r = fwk_testPatternData[0];
12370 testPatternData.b = fwk_testPatternData[3];
12371 switch (gCamCapability[mCameraId]->color_arrangement) {
12372 case CAM_FILTER_ARRANGEMENT_RGGB:
12373 case CAM_FILTER_ARRANGEMENT_GRBG:
12374 testPatternData.gr = fwk_testPatternData[1];
12375 testPatternData.gb = fwk_testPatternData[2];
12376 break;
12377 case CAM_FILTER_ARRANGEMENT_GBRG:
12378 case CAM_FILTER_ARRANGEMENT_BGGR:
12379 testPatternData.gr = fwk_testPatternData[2];
12380 testPatternData.gb = fwk_testPatternData[1];
12381 break;
12382 default:
12383 LOGE("color arrangement %d is not supported",
12384 gCamCapability[mCameraId]->color_arrangement);
12385 break;
12386 }
12387 }
12388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12389 testPatternData)) {
12390 rc = BAD_VALUE;
12391 }
12392 } else {
12393 LOGE("Invalid framework sensor test pattern mode %d",
12394 fwk_testPatternMode);
12395 }
12396 }
12397
12398 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12399 size_t count = 0;
12400 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12401 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12402 gps_coords.data.d, gps_coords.count, count);
12403 if (gps_coords.count != count) {
12404 rc = BAD_VALUE;
12405 }
12406 }
12407
12408 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12409 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12410 size_t count = 0;
12411 const char *gps_methods_src = (const char *)
12412 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12413 memset(gps_methods, '\0', sizeof(gps_methods));
12414 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12415 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12416 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12417 if (GPS_PROCESSING_METHOD_SIZE != count) {
12418 rc = BAD_VALUE;
12419 }
12420 }
12421
12422 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12423 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12425 gps_timestamp)) {
12426 rc = BAD_VALUE;
12427 }
12428 }
12429
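    // JPEG orientation: android.jpeg.orientation is expected to be one of 0/90/180/270;
    // it is also translated into a rotation_info bound to the snapshot stream.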
12430 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12431 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12432 cam_rotation_info_t rotation_info;
12433 if (orientation == 0) {
12434 rotation_info.rotation = ROTATE_0;
12435 } else if (orientation == 90) {
12436 rotation_info.rotation = ROTATE_90;
12437 } else if (orientation == 180) {
12438 rotation_info.rotation = ROTATE_180;
12439 } else if (orientation == 270) {
12440 rotation_info.rotation = ROTATE_270;
12441 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012442 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012443 rotation_info.streamId = snapshotStreamId;
12444 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12451 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456
12457 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12458 uint32_t thumb_quality = (uint32_t)
12459 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12461 thumb_quality)) {
12462 rc = BAD_VALUE;
12463 }
12464 }
12465
12466 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12467 cam_dimension_t dim;
12468 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12469 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12471 rc = BAD_VALUE;
12472 }
12473 }
12474
12475 // Internal metadata
12476 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12477 size_t count = 0;
12478 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12479 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12480 privatedata.data.i32, privatedata.count, count);
12481 if (privatedata.count != count) {
12482 rc = BAD_VALUE;
12483 }
12484 }
12485
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012486 // ISO/Exposure Priority
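    // When ISO or exposure-time priority is selected, the matching manual 3A value is
    // programmed and ZSL mode is enabled; when the priority tags are absent, ZSL mode is
    // explicitly disabled.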
12487 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12488 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12489 cam_priority_mode_t mode =
12490 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12491 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12492 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12493 use_iso_exp_pty.previewOnly = FALSE;
12494 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12495 use_iso_exp_pty.value = *ptr;
12496
12497 if(CAM_ISO_PRIORITY == mode) {
12498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12499 use_iso_exp_pty)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503 else {
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12505 use_iso_exp_pty)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012509
12510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514 } else {
12515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12516 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012517 }
12518 }
12519
12520 // Saturation
12521 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12522 int32_t* use_saturation =
12523 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12525 rc = BAD_VALUE;
12526 }
12527 }
12528
Thierry Strudel3d639192016-09-09 11:52:26 -070012529 // EV step
12530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12531 gCamCapability[mCameraId]->exp_compensation_step)) {
12532 rc = BAD_VALUE;
12533 }
12534
12535 // CDS info
12536 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12537 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12538 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12539
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12541 CAM_INTF_META_CDS_DATA, *cdsData)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545
Shuzhen Wang19463d72016-03-08 11:09:52 -080012546 // Hybrid AE
12547 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12548 uint8_t *hybrid_ae = (uint8_t *)
12549 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12550
12551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12552 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
Shuzhen Wang14415f52016-11-16 18:26:18 -080012557 // Histogram
12558 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12559 uint8_t histogramMode =
12560 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12562 histogramMode)) {
12563 rc = BAD_VALUE;
12564 }
12565 }
12566
12567 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12568 int32_t histogramBins =
12569 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12571 histogramBins)) {
12572 rc = BAD_VALUE;
12573 }
12574 }
12575
Thierry Strudel3d639192016-09-09 11:52:26 -070012576 return rc;
12577}
12578
12579/*===========================================================================
12580 * FUNCTION : captureResultCb
12581 *
12582 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12583 *
12584 * PARAMETERS :
12585 * @metadata : metadata super buffer from mm-camera-interface
12586 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12587 * @userdata: opaque pointer to the QCamera3HardwareInterface instance
12588 *
12589 * RETURN : NONE
12590 *==========================================================================*/
12591void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12592 camera3_stream_buffer_t *buffer,
12593 uint32_t frame_number, bool isInputBuffer, void *userdata)
12594{
12595 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12596 if (hw == NULL) {
12597 LOGE("Invalid hw %p", hw);
12598 return;
12599 }
12600
12601 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12602 return;
12603}
12604
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012605/*===========================================================================
12606 * FUNCTION : setBufferErrorStatus
12607 *
12608 * DESCRIPTION: Callback handler for channels to report any buffer errors
12609 *
12610 * PARAMETERS :
12611 * @ch : Channel on which buffer error is reported from
12612 * @frame_number : frame number on which buffer error is reported on
12613 * @buffer_status : buffer error status
12614 * @userdata: userdata
12615 *
12616 * RETURN : NONE
12617 *==========================================================================*/
12618void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12619 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12620{
12621 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12622 if (hw == NULL) {
12623 LOGE("Invalid hw %p", hw);
12624 return;
12625 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012626
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012627 hw->setBufferErrorStatus(ch, frame_number, err);
12628 return;
12629}
12630
12631void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12632 uint32_t frameNumber, camera3_buffer_status_t err)
12633{
12634 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12635 pthread_mutex_lock(&mMutex);
12636
12637 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12638 if (req.frame_number != frameNumber)
12639 continue;
12640 for (auto& k : req.mPendingBufferList) {
12641 if(k.stream->priv == ch) {
12642 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12643 }
12644 }
12645 }
12646
12647 pthread_mutex_unlock(&mMutex);
12648 return;
12649}
Thierry Strudel3d639192016-09-09 11:52:26 -070012650/*===========================================================================
12651 * FUNCTION : initialize
12652 *
12653 * DESCRIPTION: Pass framework callback pointers to HAL
12654 *
12655 * PARAMETERS :
12656 * @device       : camera3 device structure
12657 * @callback_ops : framework callback function pointers
12658 * RETURN : Success : 0
12659 * Failure: -ENODEV
12660 *==========================================================================*/
12661
12662int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12663 const camera3_callback_ops_t *callback_ops)
12664{
12665 LOGD("E");
12666 QCamera3HardwareInterface *hw =
12667 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12668 if (!hw) {
12669 LOGE("NULL camera device");
12670 return -ENODEV;
12671 }
12672
12673 int rc = hw->initialize(callback_ops);
12674 LOGD("X");
12675 return rc;
12676}
12677
12678/*===========================================================================
12679 * FUNCTION : configure_streams
12680 *
12681 * DESCRIPTION: Entry point to configure the set of output streams for the camera device
12682 *
12683 * PARAMETERS :
12684 * @device      : camera3 device structure
12685 * @stream_list : stream configuration requested by the framework
12686 * RETURN : Success: 0
12687 * Failure: -EINVAL (if stream configuration is invalid)
12688 * -ENODEV (fatal error)
12689 *==========================================================================*/
12690
12691int QCamera3HardwareInterface::configure_streams(
12692 const struct camera3_device *device,
12693 camera3_stream_configuration_t *stream_list)
12694{
12695 LOGD("E");
12696 QCamera3HardwareInterface *hw =
12697 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12698 if (!hw) {
12699 LOGE("NULL camera device");
12700 return -ENODEV;
12701 }
12702 int rc = hw->configureStreams(stream_list);
12703 LOGD("X");
12704 return rc;
12705}
12706
12707/*===========================================================================
12708 * FUNCTION : construct_default_request_settings
12709 *
12710 * DESCRIPTION: Configure a settings buffer to meet the required use case
12711 *
12712 * PARAMETERS :
12713 * @device : camera3 device structure
12714 * @type   : capture template type (e.g. CAMERA3_TEMPLATE_PREVIEW)
12715 * RETURN : Success: Return valid metadata
12716 * Failure: Return NULL
12717 *==========================================================================*/
12718const camera_metadata_t* QCamera3HardwareInterface::
12719 construct_default_request_settings(const struct camera3_device *device,
12720 int type)
12721{
12722
12723 LOGD("E");
12724 camera_metadata_t* fwk_metadata = NULL;
12725 QCamera3HardwareInterface *hw =
12726 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12727 if (!hw) {
12728 LOGE("NULL camera device");
12729 return NULL;
12730 }
12731
12732 fwk_metadata = hw->translateCapabilityToMetadata(type);
12733
12734 LOGD("X");
12735 return fwk_metadata;
12736}
12737
12738/*===========================================================================
12739 * FUNCTION : process_capture_request
12740 *
12741 * DESCRIPTION: Entry point to queue a capture request to the HAL
12742 *
12743 * PARAMETERS :
12744 * @device  : camera3 device structure
12745 * @request : capture request to be processed
12746 * RETURN : 0 on success, error code otherwise
12747 *==========================================================================*/
12748int QCamera3HardwareInterface::process_capture_request(
12749 const struct camera3_device *device,
12750 camera3_capture_request_t *request)
12751{
12752 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012753 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012754 QCamera3HardwareInterface *hw =
12755 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12756 if (!hw) {
12757 LOGE("NULL camera device");
12758 return -EINVAL;
12759 }
12760
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012761 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012762 LOGD("X");
12763 return rc;
12764}
12765
12766/*===========================================================================
12767 * FUNCTION : dump
12768 *
12769 * DESCRIPTION: Dump HAL state and refresh the log level property
12770 *
12771 * PARAMETERS :
12772 * @device : camera3 device structure
12773 * @fd     : file descriptor to dump into
12774 * RETURN : None
12775 *==========================================================================*/
12776
12777void QCamera3HardwareInterface::dump(
12778 const struct camera3_device *device, int fd)
12779{
12780 /* Log level property is read when "adb shell dumpsys media.camera" is
12781 called so that the log level can be controlled without restarting
12782 the media server */
12783 getLogLevel();
12784
12785 LOGD("E");
12786 QCamera3HardwareInterface *hw =
12787 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12788 if (!hw) {
12789 LOGE("NULL camera device");
12790 return;
12791 }
12792
12793 hw->dump(fd);
12794 LOGD("X");
12795 return;
12796}
12797
12798/*===========================================================================
12799 * FUNCTION : flush
12800 *
12801 * DESCRIPTION: Flush all in-flight requests; only acted upon in the STARTED state
12802 *
12803 * PARAMETERS :
12804 * @device : camera3 device structure
12805 *
12806 * RETURN : 0 on success, error code otherwise (-ENODEV on fatal device error)
12807 *==========================================================================*/
12808
12809int QCamera3HardwareInterface::flush(
12810 const struct camera3_device *device)
12811{
12812 int rc;
12813 LOGD("E");
12814 QCamera3HardwareInterface *hw =
12815 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12816 if (!hw) {
12817 LOGE("NULL camera device");
12818 return -EINVAL;
12819 }
12820
12821 pthread_mutex_lock(&hw->mMutex);
12822 // Validate current state
12823 switch (hw->mState) {
12824 case STARTED:
12825 /* valid state */
12826 break;
12827
12828 case ERROR:
12829 pthread_mutex_unlock(&hw->mMutex);
12830 hw->handleCameraDeviceError();
12831 return -ENODEV;
12832
12833 default:
12834 LOGI("Flush returned during state %d", hw->mState);
12835 pthread_mutex_unlock(&hw->mMutex);
12836 return 0;
12837 }
12838 pthread_mutex_unlock(&hw->mMutex);
12839
12840 rc = hw->flush(true /* restart channels */ );
12841 LOGD("X");
12842 return rc;
12843}
12844
12845/*===========================================================================
12846 * FUNCTION : close_camera_device
12847 *
12848 * DESCRIPTION: Close the camera device and destroy the HAL instance
12849 *
12850 * PARAMETERS :
12851 * @device : hw_device_t handle of the camera device to close
12852 *
12853 * RETURN : NO_ERROR on success, BAD_VALUE if the device handle is invalid
12854 *==========================================================================*/
12855int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12856{
12857 int ret = NO_ERROR;
12858 QCamera3HardwareInterface *hw =
12859 reinterpret_cast<QCamera3HardwareInterface *>(
12860 reinterpret_cast<camera3_device_t *>(device)->priv);
12861 if (!hw) {
12862 LOGE("NULL camera device");
12863 return BAD_VALUE;
12864 }
12865
12866 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12867 delete hw;
12868 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012869 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012870 return ret;
12871}
12872
12873/*===========================================================================
12874 * FUNCTION : getWaveletDenoiseProcessPlate
12875 *
12876 * DESCRIPTION: query wavelet denoise process plate
12877 *
12878 * PARAMETERS : None
12879 *
12880 * RETURN : WNR prcocess plate value
12881 * RETURN : WNR process plate value
12882cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12883{
12884 char prop[PROPERTY_VALUE_MAX];
12885 memset(prop, 0, sizeof(prop));
12886 property_get("persist.denoise.process.plates", prop, "0");
12887 int processPlate = atoi(prop);
12888 switch(processPlate) {
12889 case 0:
12890 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12891 case 1:
12892 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12893 case 2:
12894 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12895 case 3:
12896 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12897 default:
12898 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12899 }
12900}
12901
12902
12903/*===========================================================================
12904 * FUNCTION : getTemporalDenoiseProcessPlate
12905 *
12906 * DESCRIPTION: query temporal denoise process plate
12907 *
12908 * PARAMETERS : None
12909 *
12910 * RETURN : TNR prcocess plate value
12911 * RETURN : TNR process plate value
12912cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12913{
12914 char prop[PROPERTY_VALUE_MAX];
12915 memset(prop, 0, sizeof(prop));
12916 property_get("persist.tnr.process.plates", prop, "0");
12917 int processPlate = atoi(prop);
12918 switch(processPlate) {
12919 case 0:
12920 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12921 case 1:
12922 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12923 case 2:
12924 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12925 case 3:
12926 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12927 default:
12928 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12929 }
12930}
12931
12932
12933/*===========================================================================
12934 * FUNCTION : extractSceneMode
12935 *
12936 * DESCRIPTION: Extract scene mode from frameworks set metadata
12937 *
12938 * PARAMETERS :
12939 * @frame_settings: CameraMetadata reference
12940 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
12941 * @hal_metadata: hal metadata structure
12942 *
12943 * RETURN : int32_t type of status, NO_ERROR on success
12944 *==========================================================================*/
12945int32_t QCamera3HardwareInterface::extractSceneMode(
12946 const CameraMetadata &frame_settings, uint8_t metaMode,
12947 metadata_buffer_t *hal_metadata)
12948{
12949 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012950 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12951
12952 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12953 LOGD("Ignoring control mode OFF_KEEP_STATE");
12954 return NO_ERROR;
12955 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012956
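    // Scene mode is looked up only when the control mode is USE_SCENE_MODE; an HDR scene mode
    // (or a forced HDR snapshot) additionally enables sensor HDR or multi-frame HDR bracketing
    // below.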
12957 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12958 camera_metadata_ro_entry entry =
12959 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12960 if (0 == entry.count)
12961 return rc;
12962
12963 uint8_t fwk_sceneMode = entry.data.u8[0];
12964
12965 int val = lookupHalName(SCENE_MODES_MAP,
12966 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12967 fwk_sceneMode);
12968 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012969 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012970 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012971 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012972 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012973
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012974 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12975 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12976 }
12977
12978 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12979 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012980 cam_hdr_param_t hdr_params;
12981 hdr_params.hdr_enable = 1;
12982 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12983 hdr_params.hdr_need_1x = false;
12984 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12985 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12986 rc = BAD_VALUE;
12987 }
12988 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012989
Thierry Strudel3d639192016-09-09 11:52:26 -070012990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12991 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12992 rc = BAD_VALUE;
12993 }
12994 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012995
12996 if (mForceHdrSnapshot) {
12997 cam_hdr_param_t hdr_params;
12998 hdr_params.hdr_enable = 1;
12999 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13000 hdr_params.hdr_need_1x = false;
13001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13002 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13003 rc = BAD_VALUE;
13004 }
13005 }
13006
Thierry Strudel3d639192016-09-09 11:52:26 -070013007 return rc;
13008}
13009
13010/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013011 * FUNCTION : setVideoHdrMode
13012 *
13013 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13014 *
13015 * PARAMETERS :
13016 * @hal_metadata: hal metadata structure
13017 * @vhdr: requested video HDR mode (from QCAMERA3_VIDEO_HDR_MODE)
13018 *
13019 * RETURN : int32_t type of status, NO_ERROR on success
13020 *==========================================================================*/
13021int32_t QCamera3HardwareInterface::setVideoHdrMode(
13022 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13023{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013024 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13025 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13026 }
13027
13028 LOGE("Invalid Video HDR mode %d!", vhdr);
13029 return BAD_VALUE;
13030}
13031
13032/*===========================================================================
13033 * FUNCTION : setSensorHDR
13034 *
13035 * DESCRIPTION: Enable/disable sensor HDR.
13036 *
13037 * PARAMETERS :
13038 * @hal_metadata: hal metadata structure
13039 * @enable: boolean whether to enable/disable sensor HDR
13040 * @isVideoHdrEnable: true when called on behalf of video HDR rather than scene-mode HDR
13041 *
13042 * RETURN : int32_t type of status, NO_ERROR on success
13042 *==========================================================================*/
13043int32_t QCamera3HardwareInterface::setSensorHDR(
13044 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13045{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013046 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013047 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13048
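    // The persist.camera.sensor.hdr property selects the sensor HDR flavor to use when
    // enabling; the chosen type is applied only if the capability mask reports support for it.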
13049 if (enable) {
13050 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13051 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13052 #ifdef _LE_CAMERA_
13053 //Default to staggered HDR for IOT
13054 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13055 #else
13056 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13057 #endif
13058 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13059 }
13060
13061 bool isSupported = false;
13062 switch (sensor_hdr) {
13063 case CAM_SENSOR_HDR_IN_SENSOR:
13064 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13065 CAM_QCOM_FEATURE_SENSOR_HDR) {
13066 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013067 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013068 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013069 break;
13070 case CAM_SENSOR_HDR_ZIGZAG:
13071 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13072 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13073 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013074 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013075 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013076 break;
13077 case CAM_SENSOR_HDR_STAGGERED:
13078 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13079 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13080 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013081 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013082 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013083 break;
13084 case CAM_SENSOR_HDR_OFF:
13085 isSupported = true;
13086 LOGD("Turning off sensor HDR");
13087 break;
13088 default:
13089 LOGE("HDR mode %d not supported", sensor_hdr);
13090 rc = BAD_VALUE;
13091 break;
13092 }
13093
13094 if(isSupported) {
13095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13096 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13097 rc = BAD_VALUE;
13098 } else {
13099 if(!isVideoHdrEnable)
13100 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013101 }
13102 }
13103 return rc;
13104}
13105
13106/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013107 * FUNCTION : needRotationReprocess
13108 *
13109 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13110 *
13111 * PARAMETERS : none
13112 *
13113 * RETURN : true: needed
13114 * false: no need
13115 *==========================================================================*/
13116bool QCamera3HardwareInterface::needRotationReprocess()
13117{
13118 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13119 // current rotation is not zero, and pp has the capability to process rotation
13120 LOGH("need do reprocess for rotation");
13121 return true;
13122 }
13123
13124 return false;
13125}
13126
13127/*===========================================================================
13128 * FUNCTION : needReprocess
13129 *
13130 * DESCRIPTION: if reprocess in needed
13131 * DESCRIPTION: if reprocess is needed
13132 * PARAMETERS : none
13133 *
13134 * RETURN : true: needed
13135 * false: no need
13136 *==========================================================================*/
13137bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13138{
13139 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13140 // TODO: add for ZSL HDR later
13141 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13142 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13143 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13144 return true;
13145 } else {
13146 LOGH("already post processed frame");
13147 return false;
13148 }
13149 }
13150 return needRotationReprocess();
13151}
13152
13153/*===========================================================================
13154 * FUNCTION : needJpegExifRotation
13155 *
13156 * DESCRIPTION: if rotation from jpeg is needed
13157 * DESCRIPTION: if EXIF rotation by the JPEG encoder is needed
13158 * PARAMETERS : none
13159 *
13160 * RETURN : true: needed
13161 * false: no need
13162 *==========================================================================*/
13163bool QCamera3HardwareInterface::needJpegExifRotation()
13164{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013165 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013166 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13167 LOGD("Need use Jpeg EXIF Rotation");
13168 return true;
13169 }
13170 return false;
13171}
13172
13173/*===========================================================================
13174 * FUNCTION : addOfflineReprocChannel
13175 *
13176 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13177 * coming from input channel
13178 *
13179 * PARAMETERS :
13180 * @config : reprocess configuration
13181 * @inputChHandle : pointer to the input (source) channel
13182 *
13183 *
13184 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13185 *==========================================================================*/
13186QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13187 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13188{
13189 int32_t rc = NO_ERROR;
13190 QCamera3ReprocessChannel *pChannel = NULL;
13191
13192 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013193 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13194 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013195 if (NULL == pChannel) {
13196 LOGE("no mem for reprocess channel");
13197 return NULL;
13198 }
13199
13200 rc = pChannel->initialize(IS_TYPE_NONE);
13201 if (rc != NO_ERROR) {
13202 LOGE("init reprocess channel failed, ret = %d", rc);
13203 delete pChannel;
13204 return NULL;
13205 }
13206
13207 // pp feature config
13208 cam_pp_feature_config_t pp_config;
13209 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13210
13211 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13212 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13213 & CAM_QCOM_FEATURE_DSDN) {
13214        //Use CPP CDS in case h/w supports it.
13215 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13216 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13217 }
13218 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13219 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13220 }
13221
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013222 if (config.hdr_param.hdr_enable) {
13223 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13224 pp_config.hdr_param = config.hdr_param;
13225 }
13226
13227 if (mForceHdrSnapshot) {
13228 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13229 pp_config.hdr_param.hdr_enable = 1;
13230 pp_config.hdr_param.hdr_need_1x = 0;
13231 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13232 }
13233
Thierry Strudel3d639192016-09-09 11:52:26 -070013234 rc = pChannel->addReprocStreamsFromSource(pp_config,
13235 config,
13236 IS_TYPE_NONE,
13237 mMetadataChannel);
13238
13239 if (rc != NO_ERROR) {
13240 delete pChannel;
13241 return NULL;
13242 }
13243 return pChannel;
13244}
13245
13246/*===========================================================================
13247 * FUNCTION : getMobicatMask
13248 *
13249 * DESCRIPTION: returns mobicat mask
13250 *
13251 * PARAMETERS : none
13252 *
13253 * RETURN : mobicat mask
13254 *
13255 *==========================================================================*/
13256uint8_t QCamera3HardwareInterface::getMobicatMask()
13257{
13258 return m_MobicatMask;
13259}
13260
13261/*===========================================================================
13262 * FUNCTION : setMobicat
13263 *
13264 * DESCRIPTION: set Mobicat on/off.
13265 *
13266 * PARAMETERS :
13267 * @params : none
13268 *
13269 * RETURN : int32_t type of status
13270 * NO_ERROR -- success
13271 * non-zero failure code
13272 *==========================================================================*/
13273int32_t QCamera3HardwareInterface::setMobicat()
13274{
13275 char value [PROPERTY_VALUE_MAX];
13276 property_get("persist.camera.mobicat", value, "0");
13277 int32_t ret = NO_ERROR;
13278 uint8_t enableMobi = (uint8_t)atoi(value);
13279
13280 if (enableMobi) {
13281 tune_cmd_t tune_cmd;
13282 tune_cmd.type = SET_RELOAD_CHROMATIX;
13283 tune_cmd.module = MODULE_ALL;
13284 tune_cmd.value = TRUE;
13285 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13286 CAM_INTF_PARM_SET_VFE_COMMAND,
13287 tune_cmd);
13288
13289 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13290 CAM_INTF_PARM_SET_PP_COMMAND,
13291 tune_cmd);
13292 }
13293 m_MobicatMask = enableMobi;
13294
13295 return ret;
13296}
13297
13298/*===========================================================================
13299* FUNCTION : getLogLevel
13300*
13301* DESCRIPTION: Reads the log level property into a variable
13302*
13303* PARAMETERS :
13304* None
13305*
13306* RETURN :
13307* None
13308*==========================================================================*/
13309void QCamera3HardwareInterface::getLogLevel()
13310{
13311 char prop[PROPERTY_VALUE_MAX];
13312 uint32_t globalLogLevel = 0;
13313
13314 property_get("persist.camera.hal.debug", prop, "0");
13315 int val = atoi(prop);
13316 if (0 <= val) {
13317 gCamHal3LogLevel = (uint32_t)val;
13318 }
13319
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013320 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013321 gKpiDebugLevel = atoi(prop);
13322
13323 property_get("persist.camera.global.debug", prop, "0");
13324 val = atoi(prop);
13325 if (0 <= val) {
13326 globalLogLevel = (uint32_t)val;
13327 }
13328
13329 /* Highest log level among hal.logs and global.logs is selected */
13330 if (gCamHal3LogLevel < globalLogLevel)
13331 gCamHal3LogLevel = globalLogLevel;
13332
13333 return;
13334}
13335
13336/*===========================================================================
13337 * FUNCTION : validateStreamRotations
13338 *
13339 * DESCRIPTION: Check if the rotations requested are supported
13340 *
13341 * PARAMETERS :
13342 * @stream_list : streams to be configured
13343 *
13344 * RETURN : NO_ERROR on success
13345 * -EINVAL on failure
13346 *
13347 *==========================================================================*/
13348int QCamera3HardwareInterface::validateStreamRotations(
13349 camera3_stream_configuration_t *streamList)
13350{
13351 int rc = NO_ERROR;
13352
13353 /*
13354 * Loop through all streams requested in configuration
13355 * Check if unsupported rotations have been requested on any of them
13356 */
13357    for (size_t j = 0; j < streamList->num_streams; j++) {
13358 camera3_stream_t *newStream = streamList->streams[j];
13359
13360 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13361 bool isImplDef = (newStream->format ==
13362 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13363 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13364 isImplDef);
13365
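        // Rotation is only supported on implementation-defined output streams,
        // and bidirectional (ZSL) streams must not request any rotation.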
13366 if (isRotated && (!isImplDef || isZsl)) {
13367            LOGE("Error: Unsupported rotation of %d requested for stream "
13368 "type:%d and stream format:%d",
13369 newStream->rotation, newStream->stream_type,
13370 newStream->format);
13371 rc = -EINVAL;
13372 break;
13373 }
13374 }
13375
13376 return rc;
13377}
13378
13379/*===========================================================================
13380* FUNCTION : getFlashInfo
13381*
13382* DESCRIPTION: Retrieve information about whether the device has a flash.
13383*
13384* PARAMETERS :
13385* @cameraId : Camera id to query
13386* @hasFlash : Boolean indicating whether there is a flash device
13387* associated with given camera
13388* @flashNode : If a flash device exists, this will be its device node.
13389*
13390* RETURN :
13391* None
13392*==========================================================================*/
13393void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13394 bool& hasFlash,
13395 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13396{
13397 cam_capability_t* camCapability = gCamCapability[cameraId];
13398 if (NULL == camCapability) {
13399 hasFlash = false;
13400 flashNode[0] = '\0';
13401 } else {
13402 hasFlash = camCapability->flash_available;
13403 strlcpy(flashNode,
13404 (char*)camCapability->flash_dev_name,
13405 QCAMERA_MAX_FILEPATH_LENGTH);
13406 }
13407}
13408
13409/*===========================================================================
13410* FUNCTION : getEepromVersionInfo
13411*
13412* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13413*
13414* PARAMETERS : None
13415*
13416* RETURN : string describing EEPROM version
13417* "\0" if no such info available
13418*==========================================================================*/
13419const char *QCamera3HardwareInterface::getEepromVersionInfo()
13420{
13421 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13422}
13423
13424/*===========================================================================
13425* FUNCTION : getLdafCalib
13426*
13427* DESCRIPTION: Retrieve Laser AF calibration data
13428*
13429* PARAMETERS : None
13430*
13431* RETURN     : Pointer to two uint32_t values describing laser AF calibration data
13432* NULL if none is available.
13433*==========================================================================*/
13434const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13435{
13436 if (mLdafCalibExist) {
13437 return &mLdafCalib[0];
13438 } else {
13439 return NULL;
13440 }
13441}
13442
13443/*===========================================================================
13444 * FUNCTION : dynamicUpdateMetaStreamInfo
13445 *
13446 * DESCRIPTION: This function:
13447 * (1) stops all the channels
13448 * (2) returns error on pending requests and buffers
13449 * (3) sends metastream_info in setparams
13450 * (4) starts all channels
13451 *              This is useful when the sensor has to be restarted to apply
13452 *              settings, such as frame rate, that require a different sensor mode
13453 *
13454 * PARAMETERS : None
13455 *
13456 * RETURN : NO_ERROR on success
13457 * Error codes on failure
13458 *
13459 *==========================================================================*/
13460int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13461{
13462    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
13463    int rc = NO_ERROR;
13464
13465 LOGD("E");
13466
13467 rc = stopAllChannels();
13468 if (rc < 0) {
13469 LOGE("stopAllChannels failed");
13470 return rc;
13471 }
13472
13473 rc = notifyErrorForPendingRequests();
13474 if (rc < 0) {
13475 LOGE("notifyErrorForPendingRequests failed");
13476 return rc;
13477 }
13478
13479 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13480 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13481                " Format:%d",
13482 mStreamConfigInfo.type[i],
13483 mStreamConfigInfo.stream_sizes[i].width,
13484 mStreamConfigInfo.stream_sizes[i].height,
13485 mStreamConfigInfo.postprocess_mask[i],
13486 mStreamConfigInfo.format[i]);
13487 }
13488
13489 /* Send meta stream info once again so that ISP can start */
13490 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13491 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13492 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13493 mParameters);
13494 if (rc < 0) {
13495        LOGE("Set meta stream info failed. Sensor mode will not change");
13496 }
13497
13498 rc = startAllChannels();
13499 if (rc < 0) {
13500 LOGE("startAllChannels failed");
13501 return rc;
13502 }
13503
13504 LOGD("X");
13505 return rc;
13506}
13507
13508/*===========================================================================
13509 * FUNCTION : stopAllChannels
13510 *
13511 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13512 *
13513 * PARAMETERS : None
13514 *
13515 * RETURN : NO_ERROR on success
13516 * Error codes on failure
13517 *
13518 *==========================================================================*/
13519int32_t QCamera3HardwareInterface::stopAllChannels()
13520{
13521 int32_t rc = NO_ERROR;
13522
13523 LOGD("Stopping all channels");
13524 // Stop the Streams/Channels
13525 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13526 it != mStreamInfo.end(); it++) {
13527 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13528 if (channel) {
13529 channel->stop();
13530 }
13531 (*it)->status = INVALID;
13532 }
13533
13534 if (mSupportChannel) {
13535 mSupportChannel->stop();
13536 }
13537 if (mAnalysisChannel) {
13538 mAnalysisChannel->stop();
13539 }
13540 if (mRawDumpChannel) {
13541 mRawDumpChannel->stop();
13542 }
13543    if (mHdrPlusRawSrcChannel) {
13544 mHdrPlusRawSrcChannel->stop();
13545 }
13546    if (mMetadataChannel) {
13547        /* If mStreamInfo is not empty, the metadata stream exists */
13548 mMetadataChannel->stop();
13549 }
13550
13551 LOGD("All channels stopped");
13552 return rc;
13553}
13554
13555/*===========================================================================
13556 * FUNCTION : startAllChannels
13557 *
13558 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13559 *
13560 * PARAMETERS : None
13561 *
13562 * RETURN : NO_ERROR on success
13563 * Error codes on failure
13564 *
13565 *==========================================================================*/
13566int32_t QCamera3HardwareInterface::startAllChannels()
13567{
13568 int32_t rc = NO_ERROR;
13569
13570 LOGD("Start all channels ");
13571 // Start the Streams/Channels
13572 if (mMetadataChannel) {
13573        /* If mStreamInfo is not empty, the metadata stream exists */
13574 rc = mMetadataChannel->start();
13575 if (rc < 0) {
13576 LOGE("META channel start failed");
13577 return rc;
13578 }
13579 }
13580 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13581 it != mStreamInfo.end(); it++) {
13582 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13583 if (channel) {
13584 rc = channel->start();
13585 if (rc < 0) {
13586 LOGE("channel start failed");
13587 return rc;
13588 }
13589 }
13590 }
13591 if (mAnalysisChannel) {
13592 mAnalysisChannel->start();
13593 }
13594 if (mSupportChannel) {
13595 rc = mSupportChannel->start();
13596 if (rc < 0) {
13597 LOGE("Support channel start failed");
13598 return rc;
13599 }
13600 }
13601 if (mRawDumpChannel) {
13602 rc = mRawDumpChannel->start();
13603 if (rc < 0) {
13604 LOGE("RAW dump channel start failed");
13605 return rc;
13606 }
13607 }
13608    if (mHdrPlusRawSrcChannel) {
13609 rc = mHdrPlusRawSrcChannel->start();
13610 if (rc < 0) {
13611 LOGE("HDR+ RAW channel start failed");
13612 return rc;
13613 }
13614 }
13615
13616 LOGD("All channels started");
13617 return rc;
13618}
13619
13620/*===========================================================================
13621 * FUNCTION : notifyErrorForPendingRequests
13622 *
13623 * DESCRIPTION: This function sends error for all the pending requests/buffers
13624 *
13625 * PARAMETERS : None
13626 *
13627 * RETURN : Error codes
13628 * NO_ERROR on success
13629 *
13630 *==========================================================================*/
13631int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13632{
13633 int32_t rc = NO_ERROR;
13634 unsigned int frameNum = 0;
13635 camera3_capture_result_t result;
13636 camera3_stream_buffer_t *pStream_Buf = NULL;
13637
13638 memset(&result, 0, sizeof(camera3_capture_result_t));
13639
13640 if (mPendingRequestsList.size() > 0) {
13641 pendingRequestIterator i = mPendingRequestsList.begin();
13642 frameNum = i->frame_number;
13643 } else {
13644 /* There might still be pending buffers even though there are
13645 no pending requests. Setting the frameNum to MAX so that
13646 all the buffers with smaller frame numbers are returned */
13647 frameNum = UINT_MAX;
13648 }
13649
13650 LOGH("Oldest frame num on mPendingRequestsList = %u",
13651 frameNum);
13652
13653    notifyErrorFoPendingDepthData(mDepthChannel);
13654
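    // For requests older than the oldest entry in mPendingRequestsList the
    // result metadata has already been delivered, so only buffer errors are
    // reported; newer requests receive a full request error below.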
13655    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13656 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13657
13658 if (req->frame_number < frameNum) {
13659 // Send Error notify to frameworks for each buffer for which
13660 // metadata buffer is already sent
13661 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13662 req->frame_number, req->mPendingBufferList.size());
13663
13664 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13665 if (NULL == pStream_Buf) {
13666 LOGE("No memory for pending buffers array");
13667 return NO_MEMORY;
13668 }
13669 memset(pStream_Buf, 0,
13670 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13671 result.result = NULL;
13672 result.frame_number = req->frame_number;
13673 result.num_output_buffers = req->mPendingBufferList.size();
13674 result.output_buffers = pStream_Buf;
13675
13676 size_t index = 0;
13677 for (auto info = req->mPendingBufferList.begin();
13678 info != req->mPendingBufferList.end(); ) {
13679
13680 camera3_notify_msg_t notify_msg;
13681 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13682 notify_msg.type = CAMERA3_MSG_ERROR;
13683 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13684 notify_msg.message.error.error_stream = info->stream;
13685 notify_msg.message.error.frame_number = req->frame_number;
13686 pStream_Buf[index].acquire_fence = -1;
13687 pStream_Buf[index].release_fence = -1;
13688 pStream_Buf[index].buffer = info->buffer;
13689 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13690 pStream_Buf[index].stream = info->stream;
13691                orchestrateNotify(&notify_msg);
13692                index++;
13693 // Remove buffer from list
13694 info = req->mPendingBufferList.erase(info);
13695 }
13696
13697 // Remove this request from Map
13698 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13699 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13700 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13701
13702            orchestrateResult(&result);
13703
13704 delete [] pStream_Buf;
13705 } else {
13706
13707 // Go through the pending requests info and send error request to framework
13708 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13709
13710 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13711
13712 // Send error notify to frameworks
13713 camera3_notify_msg_t notify_msg;
13714 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13715 notify_msg.type = CAMERA3_MSG_ERROR;
13716 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13717 notify_msg.message.error.error_stream = NULL;
13718 notify_msg.message.error.frame_number = req->frame_number;
13719            orchestrateNotify(&notify_msg);
13720
13721 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13722 if (NULL == pStream_Buf) {
13723 LOGE("No memory for pending buffers array");
13724 return NO_MEMORY;
13725 }
13726 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13727
13728 result.result = NULL;
13729 result.frame_number = req->frame_number;
13730 result.input_buffer = i->input_buffer;
13731 result.num_output_buffers = req->mPendingBufferList.size();
13732 result.output_buffers = pStream_Buf;
13733
13734 size_t index = 0;
13735 for (auto info = req->mPendingBufferList.begin();
13736 info != req->mPendingBufferList.end(); ) {
13737 pStream_Buf[index].acquire_fence = -1;
13738 pStream_Buf[index].release_fence = -1;
13739 pStream_Buf[index].buffer = info->buffer;
13740 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13741 pStream_Buf[index].stream = info->stream;
13742 index++;
13743 // Remove buffer from list
13744 info = req->mPendingBufferList.erase(info);
13745 }
13746
13747 // Remove this request from Map
13748 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13749 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13750 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13751
13752            orchestrateResult(&result);
13753            delete [] pStream_Buf;
13754 i = erasePendingRequest(i);
13755 }
13756 }
13757
13758 /* Reset pending frame Drop list and requests list */
13759 mPendingFrameDropList.clear();
13760
13761 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13762 req.mPendingBufferList.clear();
13763 }
13764 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13765    LOGH("Cleared all the pending buffers ");
13766
13767 return rc;
13768}
13769
13770bool QCamera3HardwareInterface::isOnEncoder(
13771 const cam_dimension_t max_viewfinder_size,
13772 uint32_t width, uint32_t height)
13773{
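    // A stream is considered to be on the encoder path when it exceeds the
    // maximum viewfinder size or the 4K video dimensions.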
13774    return ((width > (uint32_t)max_viewfinder_size.width) ||
13775 (height > (uint32_t)max_viewfinder_size.height) ||
13776 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13777 (height > (uint32_t)VIDEO_4K_HEIGHT));
13778}
13779
13780/*===========================================================================
13781 * FUNCTION : setBundleInfo
13782 *
13783 * DESCRIPTION: Set bundle info for all streams that are bundled.
13784 *
13785 * PARAMETERS : None
13786 *
13787 * RETURN : NO_ERROR on success
13788 * Error codes on failure
13789 *==========================================================================*/
13790int32_t QCamera3HardwareInterface::setBundleInfo()
13791{
13792 int32_t rc = NO_ERROR;
13793
13794 if (mChannelHandle) {
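        // Query the bundle parameters once and propagate them to every active
        // channel so that all bundled streams share the same configuration.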
13795 cam_bundle_config_t bundleInfo;
13796 memset(&bundleInfo, 0, sizeof(bundleInfo));
13797 rc = mCameraHandle->ops->get_bundle_info(
13798 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13799 if (rc != NO_ERROR) {
13800 LOGE("get_bundle_info failed");
13801 return rc;
13802 }
13803 if (mAnalysisChannel) {
13804 mAnalysisChannel->setBundleInfo(bundleInfo);
13805 }
13806 if (mSupportChannel) {
13807 mSupportChannel->setBundleInfo(bundleInfo);
13808 }
13809 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13810 it != mStreamInfo.end(); it++) {
13811 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13812 channel->setBundleInfo(bundleInfo);
13813 }
13814 if (mRawDumpChannel) {
13815 mRawDumpChannel->setBundleInfo(bundleInfo);
13816 }
13817        if (mHdrPlusRawSrcChannel) {
13818 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13819 }
13820    }
13821
13822 return rc;
13823}
13824
13825/*===========================================================================
13826 * FUNCTION   : setInstantAEC
13827 *
13828 * DESCRIPTION: Set Instant AEC related params.
13829 *
13830 * PARAMETERS :
13831 * @meta: CameraMetadata reference
13832 *
13833 * RETURN : NO_ERROR on success
13834 * Error codes on failure
13835 *==========================================================================*/
13836int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13837{
13838 int32_t rc = NO_ERROR;
13839 uint8_t val = 0;
13840 char prop[PROPERTY_VALUE_MAX];
13841
13842 // First try to configure instant AEC from framework metadata
13843 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13844 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13845 }
13846
13847    // If the framework did not set this value, fall back to the system property.
13848 if (val == 0) {
13849 memset(prop, 0, sizeof(prop));
13850 property_get("persist.camera.instant.aec", prop, "0");
13851 val = (uint8_t)atoi(prop);
13852 }
13853
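    // Valid instant AEC modes lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX).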
13854 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13855 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13856 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13857 mInstantAEC = val;
13858 mInstantAECSettledFrameNumber = 0;
13859 mInstantAecFrameIdxCount = 0;
13860 LOGH("instantAEC value set %d",val);
13861 if (mInstantAEC) {
13862 memset(prop, 0, sizeof(prop));
13863 property_get("persist.camera.ae.instant.bound", prop, "10");
13864 int32_t aec_frame_skip_cnt = atoi(prop);
13865 if (aec_frame_skip_cnt >= 0) {
13866 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13867 } else {
13868 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13869 rc = BAD_VALUE;
13870 }
13871 }
13872 } else {
13873 LOGE("Bad instant aec value set %d", val);
13874 rc = BAD_VALUE;
13875 }
13876 return rc;
13877}
13878
13879/*===========================================================================
13880 * FUNCTION   : get_num_overall_buffers
13881 *
13882 * DESCRIPTION: Estimate number of pending buffers across all requests.
13883 *
13884 * PARAMETERS : None
13885 *
13886 * RETURN : Number of overall pending buffers
13887 *
13888 *==========================================================================*/
13889uint32_t PendingBuffersMap::get_num_overall_buffers()
13890{
13891 uint32_t sum_buffers = 0;
13892 for (auto &req : mPendingBuffersInRequest) {
13893 sum_buffers += req.mPendingBufferList.size();
13894 }
13895 return sum_buffers;
13896}
13897
13898/*===========================================================================
13899 * FUNCTION : removeBuf
13900 *
13901 * DESCRIPTION: Remove a matching buffer from tracker.
13902 *
13903 * PARAMETERS : @buffer: image buffer for the callback
13904 *
13905 * RETURN : None
13906 *
13907 *==========================================================================*/
13908void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13909{
13910 bool buffer_found = false;
13911 for (auto req = mPendingBuffersInRequest.begin();
13912 req != mPendingBuffersInRequest.end(); req++) {
13913 for (auto k = req->mPendingBufferList.begin();
13914 k != req->mPendingBufferList.end(); k++ ) {
13915 if (k->buffer == buffer) {
13916 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13917 req->frame_number, buffer);
13918 k = req->mPendingBufferList.erase(k);
13919 if (req->mPendingBufferList.empty()) {
13920 // Remove this request from Map
13921 req = mPendingBuffersInRequest.erase(req);
13922 }
13923 buffer_found = true;
13924 break;
13925 }
13926 }
13927 if (buffer_found) {
13928 break;
13929 }
13930 }
13931 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13932 get_num_overall_buffers());
13933}
13934
13935/*===========================================================================
13936 * FUNCTION   : getBufErrStatus
13937 *
13938 * DESCRIPTION: get buffer error status
13939 *
13940 * PARAMETERS : @buffer: buffer handle
13941 *
13942 * RETURN : Error status
13943 *
13944 *==========================================================================*/
13945int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13946{
13947 for (auto& req : mPendingBuffersInRequest) {
13948 for (auto& k : req.mPendingBufferList) {
13949 if (k.buffer == buffer)
13950 return k.bufStatus;
13951 }
13952 }
13953 return CAMERA3_BUFFER_STATUS_OK;
13954}
13955
13956/*===========================================================================
13957 * FUNCTION   : setPAAFSupport
13958 *
13959 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13960 * feature mask according to stream type and filter
13961 * arrangement
13962 *
13963 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13964 * @stream_type: stream type
13965 * @filter_arrangement: filter arrangement
13966 *
13967 * RETURN : None
13968 *==========================================================================*/
13969void QCamera3HardwareInterface::setPAAFSupport(
13970 cam_feature_mask_t& feature_mask,
13971 cam_stream_type_t stream_type,
13972 cam_color_filter_arrangement_t filter_arrangement)
13973{
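    // For Bayer sensors, PAAF applies to preview, analysis and video streams
    // (unless the PPEISCORE feature is enabled); for mono (Y-only) sensors it
    // applies to the analysis stream.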
13974    switch (filter_arrangement) {
13975 case CAM_FILTER_ARRANGEMENT_RGGB:
13976 case CAM_FILTER_ARRANGEMENT_GRBG:
13977 case CAM_FILTER_ARRANGEMENT_GBRG:
13978 case CAM_FILTER_ARRANGEMENT_BGGR:
13979        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13980 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
13981                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13982            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13983 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13984        }
13985 break;
13986 case CAM_FILTER_ARRANGEMENT_Y:
13987 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13988 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13989 }
13990 break;
13991 default:
13992 break;
13993 }
13994    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13995 feature_mask, stream_type, filter_arrangement);
13996
13997
13998}
13999
14000/*===========================================================================
14001* FUNCTION : getSensorMountAngle
14002*
14003* DESCRIPTION: Retrieve sensor mount angle
14004*
14005* PARAMETERS : None
14006*
14007* RETURN : sensor mount angle in uint32_t
14008*==========================================================================*/
14009uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14010{
14011 return gCamCapability[mCameraId]->sensor_mount_angle;
14012}
14013
14014/*===========================================================================
14015* FUNCTION : getRelatedCalibrationData
14016*
14017* DESCRIPTION: Retrieve related system calibration data
14018*
14019* PARAMETERS : None
14020*
14021* RETURN     : Pointer to the related system calibration data
14022*==========================================================================*/
14023const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14024{
14025 return (const cam_related_system_calibration_data_t *)
14026 &(gCamCapability[mCameraId]->related_cam_calibration);
14027}
14028
14029/*===========================================================================
14030 * FUNCTION : is60HzZone
14031 *
14032 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14033 *
14034 * PARAMETERS : None
14035 *
14036 * RETURN : True if in 60Hz zone, False otherwise
14037 *==========================================================================*/
14038bool QCamera3HardwareInterface::is60HzZone()
14039{
14040 time_t t = time(NULL);
14041 struct tm lt;
14042
14043 struct tm* r = localtime_r(&t, &lt);
14044
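    // Heuristic based on the local UTC offset: offsets of -2 hours or less and
    // +8 hours or more are treated as 60Hz regions, everything in between as
    // 50Hz. If local time cannot be obtained, default to 60Hz.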
14045 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14046 return true;
14047 else
14048 return false;
14049}
14050
14051/*===========================================================================
14052 * FUNCTION : adjustBlackLevelForCFA
14053 *
14054 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order
14055 *              of the sensor's Bayer CFA (Color Filter Array).
14056 *
14057 * PARAMETERS : @input: black level pattern in the order of RGGB
14058 * @output: black level pattern in the order of CFA
14059 * @color_arrangement: CFA color arrangement
14060 *
14061 * RETURN : None
14062 *==========================================================================*/
14063template<typename T>
14064void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14065 T input[BLACK_LEVEL_PATTERN_CNT],
14066 T output[BLACK_LEVEL_PATTERN_CNT],
14067 cam_color_filter_arrangement_t color_arrangement)
14068{
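    // The input pattern is in R, Gr, Gb, B (RGGB) order; remap each entry to
    // its position in the sensor's CFA readout order.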
14069 switch (color_arrangement) {
14070 case CAM_FILTER_ARRANGEMENT_GRBG:
14071 output[0] = input[1];
14072 output[1] = input[0];
14073 output[2] = input[3];
14074 output[3] = input[2];
14075 break;
14076 case CAM_FILTER_ARRANGEMENT_GBRG:
14077 output[0] = input[2];
14078 output[1] = input[3];
14079 output[2] = input[0];
14080 output[3] = input[1];
14081 break;
14082 case CAM_FILTER_ARRANGEMENT_BGGR:
14083 output[0] = input[3];
14084 output[1] = input[2];
14085 output[2] = input[1];
14086 output[3] = input[0];
14087 break;
14088 case CAM_FILTER_ARRANGEMENT_RGGB:
14089 output[0] = input[0];
14090 output[1] = input[1];
14091 output[2] = input[2];
14092 output[3] = input[3];
14093 break;
14094 default:
14095 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14096 break;
14097 }
14098}
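// Usage sketch (illustrative only; the black level values are hypothetical):
//   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 65.0f, 66.0f, 67.0f};
//   float cfa[BLACK_LEVEL_PATTERN_CNT];
//   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
//   // cfa now holds {Gr, R, B, Gb}, i.e. {65.0f, 64.0f, 67.0f, 66.0f}.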
14099
14100void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14101 CameraMetadata &resultMetadata,
14102 std::shared_ptr<metadata_buffer_t> settings)
14103{
14104 if (settings == nullptr) {
14105 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14106 return;
14107 }
14108
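    // Copy the JPEG-related settings and the capture intent from the original
    // HDR+ still capture request into the result metadata, since the ZSL
    // result metadata these results are derived from does not carry them.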
14109 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14110 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14111 }
14112
14113 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14114 String8 str((const char *)gps_methods);
14115 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14116 }
14117
14118 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14119 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14120 }
14121
14122 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14123 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14124 }
14125
14126 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14127 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14128 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14129 }
14130
14131 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14132 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14133 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14134 }
14135
14136 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14137 int32_t fwk_thumb_size[2];
14138 fwk_thumb_size[0] = thumb_size->width;
14139 fwk_thumb_size[1] = thumb_size->height;
14140 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14141 }
14142
14143 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14144 uint8_t fwk_intent = intent[0];
14145 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14146 }
14147}
14148
14149bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
14150 const camera3_capture_request_t &request, const CameraMetadata &metadata)
14151{
14152 if (hdrPlusRequest == nullptr) return false;
14153
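    // A request is treated as an HDR+ capture only when it asks for
    // high-quality noise reduction and edge enhancement and has a single
    // JPEG (BLOB) output; anything else falls back to the regular capture path.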
14154 // Check noise reduction mode is high quality.
14155 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14156 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14157 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14158        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14159 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14160        return false;
14161 }
14162
14163 // Check edge mode is high quality.
14164 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14165 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14166 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14167 return false;
14168 }
14169
14170 if (request.num_output_buffers != 1 ||
14171 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14172 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14173        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14174 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14175                    request.output_buffers[i].stream->width,
14176                    request.output_buffers[i].stream->height,
14177                    request.output_buffers[i].stream->format);
14178 }
14179        return false;
14180 }
14181
14182 // Get a YUV buffer from pic channel.
14183 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14184 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14185 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14186 if (res != OK) {
14187 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14188 __FUNCTION__, strerror(-res), res);
14189 return false;
14190 }
14191
14192 pbcamera::StreamBuffer buffer;
14193 buffer.streamId = kPbYuvOutputStreamId;
14194    buffer.dmaBufFd = yuvBuffer->fd;
14195    buffer.data = yuvBuffer->buffer;
14196 buffer.dataSize = yuvBuffer->frame_len;
14197
14198 pbcamera::CaptureRequest pbRequest;
14199 pbRequest.id = request.frame_number;
14200 pbRequest.outputBuffers.push_back(buffer);
14201
14202 // Submit an HDR+ capture request to HDR+ service.
14203    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14204    if (res != OK) {
14205 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14206 strerror(-res), res);
14207 return false;
14208 }
14209
14210 hdrPlusRequest->yuvBuffer = yuvBuffer;
14211 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14212
14213 return true;
14214}
14215
14216status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14217{
14218    if (gHdrPlusClient == nullptr) {
14219        ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
14220 return -ENODEV;
14221 }
14222
14223    status_t res;
14224
14225    // Connect to HDR+ service if it's not connected yet.
14226 pthread_mutex_lock(&gCamLock);
14227 if (!gEaselConnected) {
14228 // Connect to HDR+ service
14229 res = gHdrPlusClient->connect(this);
14230 if (res != OK) {
14231 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14232 strerror(-res), res);
14233 pthread_mutex_unlock(&gCamLock);
14234 return res;
14235 }
14236
14237 // Set static metadata.
14238 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14239 if (res != OK) {
14240 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14241 strerror(-res), res);
14242 gHdrPlusClient->disconnect();
14243 pthread_mutex_unlock(&gCamLock);
14244 return res;
14245 }
14246 gEaselConnected = true;
14247    }
14248    pthread_mutex_unlock(&gCamLock);
14249
14250 // Configure stream for HDR+.
14251 res = configureHdrPlusStreamsLocked();
14252 if (res != OK) {
14253 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14254        return res;
14255 }
14256
14257 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14258 res = gHdrPlusClient->setZslHdrPlusMode(true);
14259 if (res != OK) {
14260 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14261        return res;
14262 }
14263
14264 mHdrPlusModeEnabled = true;
14265 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14266
14267 return OK;
14268}
14269
14270void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14271{
14272    // Disable HDR+ mode.
14273    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14274        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14275 if (res != OK) {
14276 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14277 }
14278    }
14279
14280 mHdrPlusModeEnabled = false;
14281 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14282}
14283
14284status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014285{
14286 pbcamera::InputConfiguration inputConfig;
14287 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14288 status_t res = OK;
14289
14290 // Configure HDR+ client streams.
14291 // Get input config.
14292 if (mHdrPlusRawSrcChannel) {
14293 // HDR+ input buffers will be provided by HAL.
14294 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14295 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14296 if (res != OK) {
14297 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14298 __FUNCTION__, strerror(-res), res);
14299 return res;
14300 }
14301
14302 inputConfig.isSensorInput = false;
14303 } else {
14304 // Sensor MIPI will send data to Easel.
14305 inputConfig.isSensorInput = true;
14306        inputConfig.sensorMode.cameraId = mCameraId;
14307        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14308 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14309 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14310 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14311 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14312 if (mSensorModeInfo.num_raw_bits != 10) {
14313 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14314 mSensorModeInfo.num_raw_bits);
14315 return BAD_VALUE;
14316 }
14317
14318 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014319 }
14320
14321 // Get output configurations.
14322 // Easel may need to output RAW16 buffers if mRawChannel was created.
14323    // TODO: handle RAW16 outputs.
14324
14325 // Easel may need to output YUV output buffers if mPictureChannel was created.
14326 pbcamera::StreamConfiguration yuvOutputConfig;
14327 if (mPictureChannel != nullptr) {
14328 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14329 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14330 if (res != OK) {
14331 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14332 __FUNCTION__, strerror(-res), res);
14333
14334 return res;
14335 }
14336
14337 outputStreamConfigs.push_back(yuvOutputConfig);
14338 }
14339
14340 // TODO: consider other channels for YUV output buffers.
14341
14342    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14343    if (res != OK) {
14344        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14345 strerror(-res), res);
14346 return res;
14347 }
14348
14349 return OK;
14350}
14351
14352void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14353 const camera_metadata_t &resultMetadata) {
14354 if (result != nullptr) {
14355 if (result->outputBuffers.size() != 1) {
14356 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14357 result->outputBuffers.size());
14358 return;
14359 }
14360
14361 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14362 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14363 result->outputBuffers[0].streamId);
14364 return;
14365 }
14366
14367        // Find the pending HDR+ request.
14368        HdrPlusPendingRequest pendingRequest;
14369 {
14370 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14371 auto req = mHdrPlusPendingRequests.find(result->requestId);
14372 pendingRequest = req->second;
14373 }
14374
14375        // Update the result metadata with the settings of the HDR+ still capture request because
14376 // the result metadata belongs to a ZSL buffer.
14377 CameraMetadata metadata;
14378 metadata = &resultMetadata;
14379 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14380 camera_metadata_t* updatedResultMetadata = metadata.release();
14381
14382 QCamera3PicChannel *picChannel =
14383 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14384
14385        // Check if dumping HDR+ YUV output is enabled.
14386 char prop[PROPERTY_VALUE_MAX];
14387 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14388 bool dumpYuvOutput = atoi(prop);
14389
14390 if (dumpYuvOutput) {
14391            // Dump yuv buffer to a ppm file.
14392 pbcamera::StreamConfiguration outputConfig;
14393 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14394 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14395 if (rc == OK) {
14396 char buf[FILENAME_MAX] = {};
14397 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14398 result->requestId, result->outputBuffers[0].streamId,
14399 outputConfig.image.width, outputConfig.image.height);
14400
14401 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14402 } else {
14403 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14404 __FUNCTION__, strerror(-rc), rc);
14405 }
14406 }
14407
14408        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14409 auto halMetadata = std::make_shared<metadata_buffer_t>();
14410 clear_metadata_buffer(halMetadata.get());
14411
14412 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14413 // encoding.
14414 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14415 halStreamId, /*minFrameDuration*/0);
14416 if (res == OK) {
14417 // Return the buffer to pic channel for encoding.
14418 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14419 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14420 halMetadata);
14421 } else {
14422 // Return the buffer without encoding.
14423 // TODO: This should not happen but we may want to report an error buffer to camera
14424 // service.
14425 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14426 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14427 strerror(-res), res);
14428 }
14429
14430 // Send HDR+ metadata to framework.
14431 {
14432 pthread_mutex_lock(&mMutex);
14433
14434 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14435 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14436 pthread_mutex_unlock(&mMutex);
14437 }
14438
14439 // Remove the HDR+ pending request.
14440 {
14441 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14442 auto req = mHdrPlusPendingRequests.find(result->requestId);
14443 mHdrPlusPendingRequests.erase(req);
14444 }
14445 }
14446}
14447
14448void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14449 // TODO: Handle HDR+ capture failures and send the failure to framework.
14450 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14451 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14452
14453 // Return the buffer to pic channel.
14454 QCamera3PicChannel *picChannel =
14455 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14456 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14457
14458 mHdrPlusPendingRequests.erase(pendingRequest);
14459}
14460
14461}; //end namespace qcamera