/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per-configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6
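// Added note (assumption, not in the original source): these indices describe how one
// face's landmark coordinates are packed into a single int32 array of
// TOTAL_LANDMARK_INDICES entries, e.g. {leftEyeX, leftEyeY, rightEyeX, rightEyeY,
// mouthX, mouthY}, matching the per-face layout of ANDROID_STATISTICS_FACE_LANDMARKS.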

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
// If Easel is connected.
bool gEaselConnected;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
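// Illustrative sketch (added, not part of the original source): the QCameraPropMap and
// QCameraMap tables in this file are plain {framework value, HAL value} pairs that are
// scanned linearly, with METADATA_MAP_SIZE giving the entry count. A lookup is assumed
// to look roughly like:
//
//     // int32_t halCds = lookupHalName(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), "Auto");
//     // -> CAM_CDS_MODE_AUTO, or a NAME_NOT_FOUND-style value if the name is absent.
//
// lookupHalName/lookupFwkName are helpers assumed to be defined elsewhere in this HAL.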
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum value has a HAL mapping, so some are not listed. The order of
 * this list also matters: when mapping from HAL to Android, the lookup traverses the
 * table from lower to higher index, so for HAL values that map to multiple Android
 * values the first entry found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
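// Added example (not in the original source): because the HAL->Android lookup takes the
// first match, CAM_AWB_D50 resolves to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 rather
// than DAYLIGHT or FINE_WEATHER, since the D50 row appears earlier in this table.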

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
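// Added note (assumption, not in the original source): entries at or above
// MIN_FPS_FOR_BATCH_MODE (120) are the ones eligible for batch mode, with a batch size
// of roughly HFR fps / PREVIEW_FPS_FOR_HFR capped at MAX_HFR_BATCH_SIZE,
// e.g. 120 fps -> 4 frames per batch, 240 fps -> 8.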

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
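// Added note (assumption, not in the original source): the framework drives the HAL only
// through this ops table. Each static entry point is expected to recover the instance
// from camera3_device_t::priv (set to `this` in the constructor below) and forward the
// call, roughly:
//
//     // QCamera3HardwareInterface *hw =
//     //         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
//     // return hw->initialize(callback_ops);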

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: check whether this hardcoding is needed; verify that mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    // Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
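// Added note (not in the original source): the persist.camera.* reads above are
// debug/tuning knobs sampled once at construction. For example, raw dumps can be
// requested from a rooted shell before opening the camera:
//
//     // adb shell setprop persist.camera.raw.dump 1
//
// What each property controls is determined where the corresponding member
// (mEnableRawDump, m_bTnrPreview, m_debug_avtimer, ...) is consumed later in this file.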

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    if (gHdrPlusClient != nullptr) {
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return rc;
}
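// Added note (not in the original source): in the usual camera HAL3 flow, the framework
// reaches this entry point through the module's open() hook and then drives the device
// via initialize() -> configure_streams() -> repeated process_capture_request() calls,
// with flush() and close_camera_device() on teardown.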

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        // Disable HDR+ mode.
        disableHdrPlusModeLocked();
        // Disconnect Easel if it's connected.
        pthread_mutex_lock(&gCamLock);
        if (gEaselConnected) {
            gHdrPlusClient->disconnect();
            gEaselConnected = false;
        }
        pthread_mutex_unlock(&gCamLock);

        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check whether the requested stream configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                    //As per spec, depth cloud size should be sample count / 16
                    uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always full active array size,
                     * but it is not clear from the spec whether the framework will
                     * always follow that; we also have logic to override to full
                     * array size, so keep the logic lenient for the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
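// Added worked example (not in the original source): for a hypothetical 4032x3024
// active array, a depth point-cloud BLOB stream must be configured as
// width = 4032 * 3024 / 16 = 762048 and height = 1 to pass the HAL_DATASPACE_DEPTH
// check above; any other dimensions are rejected.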

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams*/
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}
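// Added example (not in the original source): a configuration containing only RAW16 and
// JPEG (BLOB) streams never hits the `default:` case above, so this returns true and the
// HAL brings up an internal support/dummy stream to keep the processing pipeline alive;
// adding a YUV or implementation-defined preview stream makes it return false.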

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1420
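// Illustrative sketch (not part of the HAL build): the property handling in
// addToPPFeatureMask() above accepts the feature mask either as hex with a
// leading "0x" or as a plain decimal string. The parsing step in isolation,
// with a hypothetical helper name and a 64-bit mask assumed, could be:
static bool parseFeatureMaskSketch(const char *value, int len, uint64_t *mask)
{
    unsigned long long parsed = 0;
    int converted;
    if ((len > 2) && (value[0] == '0') && (value[1] == 'x')) {
        converted = sscanf(value, "0x%llx", &parsed);
    } else {
        converted = sscanf(value, "%llu", &parsed);
    }
    if (converted != 1) {
        // Mirrors the "Wrong feature mask" bail-out above: anything but one
        // successfully converted field means the string was malformed.
        return false;
    }
    *mask = (uint64_t)parsed;
    return true;
}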
1421/*==============================================================================
1422 * FUNCTION : updateFpsInPreviewBuffer
1423 *
1424 * DESCRIPTION: update FPS information in preview buffer.
1425 *
1426 * PARAMETERS :
1427 * @metadata : pointer to metadata buffer
1428 * @frame_number: frame_number to look for in pending buffer list
1429 *
1430 * RETURN : None
1431 *
1432 *==========================================================================*/
1433void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1434 uint32_t frame_number)
1435{
1436 // Mark all pending buffers for this particular request
1437 // with corresponding framerate information
1438 for (List<PendingBuffersInRequest>::iterator req =
1439 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1440 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1441 for(List<PendingBufferInfo>::iterator j =
1442 req->mPendingBufferList.begin();
1443 j != req->mPendingBufferList.end(); j++) {
1444 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1445 if ((req->frame_number == frame_number) &&
1446 (channel->getStreamTypeMask() &
1447 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1448 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1449 CAM_INTF_PARM_FPS_RANGE, metadata) {
1450 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1451 struct private_handle_t *priv_handle =
1452 (struct private_handle_t *)(*(j->buffer));
1453 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1454 }
1455 }
1456 }
1457 }
1458}
1459
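// Illustrative sketch (not part of the HAL build): the nested loop above boils
// down to stamping the current maximum FPS onto each pending preview buffer
// through the display metadata hook. Isolated, and assuming the pending entry
// holds a buffer_handle_t pointer as dereferenced above, plus only the
// setMetaData()/UPDATE_REFRESH_RATE usage already exercised there, that step is:
static void tagBufferRefreshRateSketch(buffer_handle_t *buffer, float maxFps)
{
    typeof (MetaData_t::refreshrate) cameraFps = maxFps;
    struct private_handle_t *priv_handle =
            (struct private_handle_t *)(*buffer);
    // The display pipeline reads this back and can match its refresh rate to
    // the camera preview frame rate.
    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
}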
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001460/*==============================================================================
1461 * FUNCTION : updateTimeStampInPendingBuffers
1462 *
1463 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1464 * of a frame number
1465 *
1466 * PARAMETERS :
 1467 * @frameNumber : frame number whose pending buffers will be stamped with the timestamp
1468 * @timestamp : timestamp to be set
1469 *
1470 * RETURN : None
1471 *
1472 *==========================================================================*/
1473void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1474 uint32_t frameNumber, nsecs_t timestamp)
1475{
1476 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1477 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1478 if (req->frame_number != frameNumber)
1479 continue;
1480
1481 for (auto k = req->mPendingBufferList.begin();
1482 k != req->mPendingBufferList.end(); k++ ) {
1483 struct private_handle_t *priv_handle =
1484 (struct private_handle_t *) (*(k->buffer));
1485 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1486 }
1487 }
1488 return;
1489}
1490
Thierry Strudel3d639192016-09-09 11:52:26 -07001491/*===========================================================================
1492 * FUNCTION : configureStreams
1493 *
1494 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1495 * and output streams.
1496 *
1497 * PARAMETERS :
1498 * @stream_list : streams to be configured
1499 *
 1500 * RETURN     : int32_t type of status
 1501 *              NO_ERROR -- success, non-zero failure code otherwise
1502 *==========================================================================*/
1503int QCamera3HardwareInterface::configureStreams(
1504 camera3_stream_configuration_t *streamList)
1505{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001506 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001507 int rc = 0;
1508
1509 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001510 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001512 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001513
1514 return rc;
1515}
1516
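// Illustrative sketch (not part of the HAL build): the acquire/configure/release
// sequence above can also be written as a small RAII guard so the perf lock is
// released on every return path. The class below is hypothetical and assumes
// only the acquirePerfLock()/releasePerfLock() calls already used above, e.g.:
//
//   ScopedPerfLockSketch<decltype(mPerfLockMgr), decltype(PERF_LOCK_START_PREVIEW)>
//           lock(mPerfLockMgr, PERF_LOCK_START_PREVIEW);
template <typename PerfLockMgr, typename PerfLockType>
class ScopedPerfLockSketch {
public:
    ScopedPerfLockSketch(PerfLockMgr &mgr, PerfLockType type) :
            mMgr(mgr), mType(type) {
        mMgr.acquirePerfLock(mType);
    }
    ~ScopedPerfLockSketch() {
        mMgr.releasePerfLock(mType);
    }
private:
    // Non-copyable: the lock must be released exactly once.
    ScopedPerfLockSketch(const ScopedPerfLockSketch &);
    ScopedPerfLockSketch &operator=(const ScopedPerfLockSketch &);
    PerfLockMgr &mMgr;
    PerfLockType mType;
};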
1517/*===========================================================================
1518 * FUNCTION : configureStreamsPerfLocked
1519 *
1520 * DESCRIPTION: configureStreams while perfLock is held.
1521 *
1522 * PARAMETERS :
1523 * @stream_list : streams to be configured
1524 *
1525 * RETURN : int32_t type of status
1526 * NO_ERROR -- success
 1527 *              non-zero failure code
1528 *==========================================================================*/
1529int QCamera3HardwareInterface::configureStreamsPerfLocked(
1530 camera3_stream_configuration_t *streamList)
1531{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001532 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 int rc = 0;
1534
1535 // Sanity check stream_list
1536 if (streamList == NULL) {
1537 LOGE("NULL stream configuration");
1538 return BAD_VALUE;
1539 }
1540 if (streamList->streams == NULL) {
1541 LOGE("NULL stream list");
1542 return BAD_VALUE;
1543 }
1544
1545 if (streamList->num_streams < 1) {
1546 LOGE("Bad number of streams requested: %d",
1547 streamList->num_streams);
1548 return BAD_VALUE;
1549 }
1550
1551 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1552 LOGE("Maximum number of streams %d exceeded: %d",
1553 MAX_NUM_STREAMS, streamList->num_streams);
1554 return BAD_VALUE;
1555 }
1556
1557 mOpMode = streamList->operation_mode;
1558 LOGD("mOpMode: %d", mOpMode);
1559
1560 /* first invalidate all the steams in the mStreamList
1561 * if they appear again, they will be validated */
1562 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1563 it != mStreamInfo.end(); it++) {
1564 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1565 if (channel) {
1566 channel->stop();
1567 }
1568 (*it)->status = INVALID;
1569 }
1570
1571 if (mRawDumpChannel) {
1572 mRawDumpChannel->stop();
1573 delete mRawDumpChannel;
1574 mRawDumpChannel = NULL;
1575 }
1576
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001577 if (mHdrPlusRawSrcChannel) {
1578 mHdrPlusRawSrcChannel->stop();
1579 delete mHdrPlusRawSrcChannel;
1580 mHdrPlusRawSrcChannel = NULL;
1581 }
1582
Thierry Strudel3d639192016-09-09 11:52:26 -07001583 if (mSupportChannel)
1584 mSupportChannel->stop();
1585
1586 if (mAnalysisChannel) {
1587 mAnalysisChannel->stop();
1588 }
1589 if (mMetadataChannel) {
 1590         /* If mStreamInfo is not empty, there is a metadata stream */
1591 mMetadataChannel->stop();
1592 }
1593 if (mChannelHandle) {
1594 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1595 mChannelHandle);
1596 LOGD("stopping channel %d", mChannelHandle);
1597 }
1598
1599 pthread_mutex_lock(&mMutex);
1600
1601 // Check state
1602 switch (mState) {
1603 case INITIALIZED:
1604 case CONFIGURED:
1605 case STARTED:
1606 /* valid state */
1607 break;
1608 default:
1609 LOGE("Invalid state %d", mState);
1610 pthread_mutex_unlock(&mMutex);
1611 return -ENODEV;
1612 }
1613
1614 /* Check whether we have video stream */
1615 m_bIs4KVideo = false;
1616 m_bIsVideo = false;
1617 m_bEisSupportedSize = false;
1618 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001619 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001621 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 uint32_t videoWidth = 0U;
1623 uint32_t videoHeight = 0U;
1624 size_t rawStreamCnt = 0;
1625 size_t stallStreamCnt = 0;
1626 size_t processedStreamCnt = 0;
1627 // Number of streams on ISP encoder path
1628 size_t numStreamsOnEncoder = 0;
1629 size_t numYuv888OnEncoder = 0;
1630 bool bYuv888OverrideJpeg = false;
1631 cam_dimension_t largeYuv888Size = {0, 0};
1632 cam_dimension_t maxViewfinderSize = {0, 0};
1633 bool bJpegExceeds4K = false;
1634 bool bJpegOnEncoder = false;
1635 bool bUseCommonFeatureMask = false;
1636 cam_feature_mask_t commonFeatureMask = 0;
1637 bool bSmallJpegSize = false;
1638 uint32_t width_ratio;
1639 uint32_t height_ratio;
1640 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1641 camera3_stream_t *inputStream = NULL;
1642 bool isJpeg = false;
1643 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001644 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001645
1646 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1647
1648 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001650 uint8_t eis_prop_set;
1651 uint32_t maxEisWidth = 0;
1652 uint32_t maxEisHeight = 0;
1653
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001654 // Initialize all instant AEC related variables
1655 mInstantAEC = false;
1656 mResetInstantAEC = false;
1657 mInstantAECSettledFrameNumber = 0;
1658 mAecSkipDisplayFrameBound = 0;
1659 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001660 mCurrFeatureState = 0;
1661 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001662
Thierry Strudel3d639192016-09-09 11:52:26 -07001663 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1664
1665 size_t count = IS_TYPE_MAX;
1666 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1667 for (size_t i = 0; i < count; i++) {
1668 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1670 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 break;
1672 }
1673 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001674 count = CAM_OPT_STAB_MAX;
1675 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1676 for (size_t i = 0; i < count; i++) {
1677 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1678 oisSupported = true;
1679 break;
1680 }
1681 }
1682
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001683 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001684 maxEisWidth = MAX_EIS_WIDTH;
1685 maxEisHeight = MAX_EIS_HEIGHT;
1686 }
1687
1688 /* EIS setprop control */
1689 char eis_prop[PROPERTY_VALUE_MAX];
1690 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001691 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001692 eis_prop_set = (uint8_t)atoi(eis_prop);
1693
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001694 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001695 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1696
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001697 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1698 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
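    // Worked example of the condition above (a reading aid, not new policy):
    // with persist.camera.eis.enable=1, an EIS-capable sensor (m_bEisSupported),
    // no OIS module (oisSupported == false) and a normal, non
    // constrained-high-speed operation mode, m_bEisEnable ends up true.
    // If OIS is available, or the configuration is HFR, EIS stays disabled
    // regardless of the property value.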
1699
Thierry Strudel3d639192016-09-09 11:52:26 -07001700 /* stream configurations */
1701 for (size_t i = 0; i < streamList->num_streams; i++) {
1702 camera3_stream_t *newStream = streamList->streams[i];
1703 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1704 "height = %d, rotation = %d, usage = 0x%x",
1705 i, newStream->stream_type, newStream->format,
1706 newStream->width, newStream->height, newStream->rotation,
1707 newStream->usage);
1708 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1709 newStream->stream_type == CAMERA3_STREAM_INPUT){
1710 isZsl = true;
1711 }
1712 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1713 inputStream = newStream;
1714 }
1715
Emilian Peev7650c122017-01-19 08:24:33 -08001716 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1717 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001718 isJpeg = true;
1719 jpegSize.width = newStream->width;
1720 jpegSize.height = newStream->height;
1721 if (newStream->width > VIDEO_4K_WIDTH ||
1722 newStream->height > VIDEO_4K_HEIGHT)
1723 bJpegExceeds4K = true;
1724 }
1725
1726 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1727 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1728 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001729 // In HAL3 we can have multiple different video streams.
1730 // The variables video width and height are used below as
1731 // dimensions of the biggest of them
1732 if (videoWidth < newStream->width ||
1733 videoHeight < newStream->height) {
1734 videoWidth = newStream->width;
1735 videoHeight = newStream->height;
1736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1738 (VIDEO_4K_HEIGHT <= newStream->height)) {
1739 m_bIs4KVideo = true;
1740 }
1741 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1742 (newStream->height <= maxEisHeight);
1743 }
1744 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1745 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1746 switch (newStream->format) {
1747 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001748 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1749 depthPresent = true;
1750 break;
1751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001752 stallStreamCnt++;
1753 if (isOnEncoder(maxViewfinderSize, newStream->width,
1754 newStream->height)) {
1755 numStreamsOnEncoder++;
1756 bJpegOnEncoder = true;
1757 }
1758 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1759 newStream->width);
1760 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1761                     newStream->height);
1762 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1763 "FATAL: max_downscale_factor cannot be zero and so assert");
1764 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1765 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1766 LOGH("Setting small jpeg size flag to true");
1767 bSmallJpegSize = true;
1768 }
1769 break;
1770 case HAL_PIXEL_FORMAT_RAW10:
1771 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1772 case HAL_PIXEL_FORMAT_RAW16:
1773 rawStreamCnt++;
1774 break;
1775 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1776 processedStreamCnt++;
1777 if (isOnEncoder(maxViewfinderSize, newStream->width,
1778 newStream->height)) {
1779 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1780 !IS_USAGE_ZSL(newStream->usage)) {
1781 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1782 }
1783 numStreamsOnEncoder++;
1784 }
1785 break;
1786 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1787 processedStreamCnt++;
1788 if (isOnEncoder(maxViewfinderSize, newStream->width,
1789 newStream->height)) {
1790 // If Yuv888 size is not greater than 4K, set feature mask
 1791                 // to SUPERSET so that it supports concurrent requests on
1792 // YUV and JPEG.
1793 if (newStream->width <= VIDEO_4K_WIDTH &&
1794 newStream->height <= VIDEO_4K_HEIGHT) {
1795 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1796 }
1797 numStreamsOnEncoder++;
1798 numYuv888OnEncoder++;
1799 largeYuv888Size.width = newStream->width;
1800 largeYuv888Size.height = newStream->height;
1801 }
1802 break;
1803 default:
1804 processedStreamCnt++;
1805 if (isOnEncoder(maxViewfinderSize, newStream->width,
1806 newStream->height)) {
1807 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1808 numStreamsOnEncoder++;
1809 }
1810 break;
1811 }
1812
1813 }
1814 }
1815
1816 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1817 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1818 !m_bIsVideo) {
1819 m_bEisEnable = false;
1820 }
1821
Thierry Strudel54dc9782017-02-15 12:12:10 -08001822 uint8_t forceEnableTnr = 0;
1823 char tnr_prop[PROPERTY_VALUE_MAX];
1824 memset(tnr_prop, 0, sizeof(tnr_prop));
1825 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1826 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1827
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 /* Logic to enable/disable TNR based on specific config size/etc.*/
1829 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1830 ((videoWidth == 1920 && videoHeight == 1080) ||
1831 (videoWidth == 1280 && videoHeight == 720)) &&
1832 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1833 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001834 else if (forceEnableTnr)
1835 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001836
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001837 char videoHdrProp[PROPERTY_VALUE_MAX];
1838 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1839 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1840 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1841
1842 if (hdr_mode_prop == 1 && m_bIsVideo &&
1843 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1844 m_bVideoHdrEnabled = true;
1845 else
1846 m_bVideoHdrEnabled = false;
1847
1848
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 /* Check if num_streams is sane */
1850 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1851 rawStreamCnt > MAX_RAW_STREAMS ||
1852 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1853 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1854 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1855 pthread_mutex_unlock(&mMutex);
1856 return -EINVAL;
1857 }
1858 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001859 if (isZsl && m_bIs4KVideo) {
1860 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 pthread_mutex_unlock(&mMutex);
1862 return -EINVAL;
1863 }
1864 /* Check if stream sizes are sane */
1865 if (numStreamsOnEncoder > 2) {
1866 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1867 pthread_mutex_unlock(&mMutex);
1868 return -EINVAL;
1869 } else if (1 < numStreamsOnEncoder){
1870 bUseCommonFeatureMask = true;
1871 LOGH("Multiple streams above max viewfinder size, common mask needed");
1872 }
1873
1874 /* Check if BLOB size is greater than 4k in 4k recording case */
1875 if (m_bIs4KVideo && bJpegExceeds4K) {
1876 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1877 pthread_mutex_unlock(&mMutex);
1878 return -EINVAL;
1879 }
1880
Emilian Peev7650c122017-01-19 08:24:33 -08001881 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1882 depthPresent) {
1883 LOGE("HAL doesn't support depth streams in HFR mode!");
1884 pthread_mutex_unlock(&mMutex);
1885 return -EINVAL;
1886 }
1887
Thierry Strudel3d639192016-09-09 11:52:26 -07001888 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1889 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1890 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1891 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1892 // configurations:
1893 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1894 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1895 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1896 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1897 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1898 __func__);
1899 pthread_mutex_unlock(&mMutex);
1900 return -EINVAL;
1901 }
1902
1903 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
 1904     // the YUV stream's size is strictly greater than the JPEG size, set common
1905 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1906 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1907 jpegSize.width, jpegSize.height) &&
1908 largeYuv888Size.width > jpegSize.width &&
1909 largeYuv888Size.height > jpegSize.height) {
1910 bYuv888OverrideJpeg = true;
1911 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1912 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 }
1914
1915 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1916 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1917 commonFeatureMask);
1918 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1919 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1920
1921 rc = validateStreamDimensions(streamList);
1922 if (rc == NO_ERROR) {
1923 rc = validateStreamRotations(streamList);
1924 }
1925 if (rc != NO_ERROR) {
1926 LOGE("Invalid stream configuration requested!");
1927 pthread_mutex_unlock(&mMutex);
1928 return rc;
1929 }
1930
1931 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1932 for (size_t i = 0; i < streamList->num_streams; i++) {
1933 camera3_stream_t *newStream = streamList->streams[i];
1934 LOGH("newStream type = %d, stream format = %d "
1935 "stream size : %d x %d, stream rotation = %d",
1936 newStream->stream_type, newStream->format,
1937 newStream->width, newStream->height, newStream->rotation);
1938 //if the stream is in the mStreamList validate it
1939 bool stream_exists = false;
1940 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1941 it != mStreamInfo.end(); it++) {
1942 if ((*it)->stream == newStream) {
1943 QCamera3ProcessingChannel *channel =
1944 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1945 stream_exists = true;
1946 if (channel)
1947 delete channel;
1948 (*it)->status = VALID;
1949 (*it)->stream->priv = NULL;
1950 (*it)->channel = NULL;
1951 }
1952 }
1953 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1954 //new stream
1955 stream_info_t* stream_info;
1956 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1957 if (!stream_info) {
1958 LOGE("Could not allocate stream info");
1959 rc = -ENOMEM;
1960 pthread_mutex_unlock(&mMutex);
1961 return rc;
1962 }
1963 stream_info->stream = newStream;
1964 stream_info->status = VALID;
1965 stream_info->channel = NULL;
1966 mStreamInfo.push_back(stream_info);
1967 }
1968 /* Covers Opaque ZSL and API1 F/W ZSL */
1969 if (IS_USAGE_ZSL(newStream->usage)
1970 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1971 if (zslStream != NULL) {
1972 LOGE("Multiple input/reprocess streams requested!");
1973 pthread_mutex_unlock(&mMutex);
1974 return BAD_VALUE;
1975 }
1976 zslStream = newStream;
1977 }
1978 /* Covers YUV reprocess */
1979 if (inputStream != NULL) {
1980 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1981 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1982 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1983 && inputStream->width == newStream->width
1984 && inputStream->height == newStream->height) {
1985 if (zslStream != NULL) {
1986 /* This scenario indicates multiple YUV streams with same size
1987 * as input stream have been requested, since zsl stream handle
 1988                  * is solely used for the purpose of overriding the size of streams
 1989                  * which share h/w streams, we will just make a guess here as to
 1990                  * which of the streams is the ZSL stream; this will be refactored
1991 * once we make generic logic for streams sharing encoder output
1992 */
1993 LOGH("Warning, Multiple ip/reprocess streams requested!");
1994 }
1995 zslStream = newStream;
1996 }
1997 }
1998 }
1999
2000 /* If a zsl stream is set, we know that we have configured at least one input or
2001 bidirectional stream */
2002 if (NULL != zslStream) {
2003 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2004 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2005 mInputStreamInfo.format = zslStream->format;
2006 mInputStreamInfo.usage = zslStream->usage;
2007 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2008 mInputStreamInfo.dim.width,
2009 mInputStreamInfo.dim.height,
2010 mInputStreamInfo.format, mInputStreamInfo.usage);
2011 }
2012
2013 cleanAndSortStreamInfo();
2014 if (mMetadataChannel) {
2015 delete mMetadataChannel;
2016 mMetadataChannel = NULL;
2017 }
2018 if (mSupportChannel) {
2019 delete mSupportChannel;
2020 mSupportChannel = NULL;
2021 }
2022
2023 if (mAnalysisChannel) {
2024 delete mAnalysisChannel;
2025 mAnalysisChannel = NULL;
2026 }
2027
2028 if (mDummyBatchChannel) {
2029 delete mDummyBatchChannel;
2030 mDummyBatchChannel = NULL;
2031 }
2032
Emilian Peev7650c122017-01-19 08:24:33 -08002033 if (mDepthChannel) {
2034 mDepthChannel = NULL;
2035 }
2036
Thierry Strudel2896d122017-02-23 19:18:03 -08002037 char is_type_value[PROPERTY_VALUE_MAX];
2038 property_get("persist.camera.is_type", is_type_value, "4");
2039 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2040
Thierry Strudel3d639192016-09-09 11:52:26 -07002041 //Create metadata channel and initialize it
2042 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2043 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2044 gCamCapability[mCameraId]->color_arrangement);
2045 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2046 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002047 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 if (mMetadataChannel == NULL) {
2049 LOGE("failed to allocate metadata channel");
2050 rc = -ENOMEM;
2051 pthread_mutex_unlock(&mMutex);
2052 return rc;
2053 }
2054 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2055 if (rc < 0) {
2056 LOGE("metadata channel initialization failed");
2057 delete mMetadataChannel;
2058 mMetadataChannel = NULL;
2059 pthread_mutex_unlock(&mMutex);
2060 return rc;
2061 }
2062
Thierry Strudel2896d122017-02-23 19:18:03 -08002063 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002064 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002065 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002066 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2067 /* Allocate channel objects for the requested streams */
2068 for (size_t i = 0; i < streamList->num_streams; i++) {
2069 camera3_stream_t *newStream = streamList->streams[i];
2070 uint32_t stream_usage = newStream->usage;
2071 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2072 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2073 struct camera_info *p_info = NULL;
2074 pthread_mutex_lock(&gCamLock);
2075 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2076 pthread_mutex_unlock(&gCamLock);
2077 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2078 || IS_USAGE_ZSL(newStream->usage)) &&
2079 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002080 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002081 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002082 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2083 if (bUseCommonFeatureMask)
2084 zsl_ppmask = commonFeatureMask;
2085 else
2086 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002087 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002088 if (numStreamsOnEncoder > 0)
2089 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2090 else
2091 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002092 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002093 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002094 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002095 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002096 LOGH("Input stream configured, reprocess config");
2097 } else {
2098 //for non zsl streams find out the format
2099 switch (newStream->format) {
2100 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2101 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002102 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2104 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2105 /* add additional features to pp feature mask */
2106 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2107 mStreamConfigInfo.num_streams);
2108
2109 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2110 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2111 CAM_STREAM_TYPE_VIDEO;
2112 if (m_bTnrEnabled && m_bTnrVideo) {
2113 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2114 CAM_QCOM_FEATURE_CPP_TNR;
2115 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2116 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2117 ~CAM_QCOM_FEATURE_CDS;
2118 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002119 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2120 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2121 CAM_QTI_FEATURE_PPEISCORE;
2122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 } else {
2124 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2125 CAM_STREAM_TYPE_PREVIEW;
2126 if (m_bTnrEnabled && m_bTnrPreview) {
2127 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2128 CAM_QCOM_FEATURE_CPP_TNR;
2129 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2130 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2131 ~CAM_QCOM_FEATURE_CDS;
2132 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002133 if(!m_bSwTnrPreview) {
2134 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2135 ~CAM_QTI_FEATURE_SW_TNR;
2136 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002137 padding_info.width_padding = mSurfaceStridePadding;
2138 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002139 previewSize.width = (int32_t)newStream->width;
2140 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002141 }
2142 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2143 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2144 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2145 newStream->height;
2146 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2147 newStream->width;
2148 }
2149 }
2150 break;
2151 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002152 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002153 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2154 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2155 if (bUseCommonFeatureMask)
2156 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2157 commonFeatureMask;
2158 else
2159 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2160 CAM_QCOM_FEATURE_NONE;
2161 } else {
2162 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2163 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2164 }
2165 break;
2166 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002167 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002168 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2169 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2170 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2171 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2172 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002173 /* Remove rotation if it is not supported
2174 for 4K LiveVideo snapshot case (online processing) */
2175 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2176 CAM_QCOM_FEATURE_ROTATION)) {
2177 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2178 &= ~CAM_QCOM_FEATURE_ROTATION;
2179 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002180 } else {
2181 if (bUseCommonFeatureMask &&
2182 isOnEncoder(maxViewfinderSize, newStream->width,
2183 newStream->height)) {
2184 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2185 } else {
2186 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2187 }
2188 }
2189 if (isZsl) {
2190 if (zslStream) {
2191 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2192 (int32_t)zslStream->width;
2193 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2194 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002195 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2196 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002197 } else {
2198 LOGE("Error, No ZSL stream identified");
2199 pthread_mutex_unlock(&mMutex);
2200 return -EINVAL;
2201 }
2202 } else if (m_bIs4KVideo) {
2203 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2204 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2205 } else if (bYuv888OverrideJpeg) {
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2207 (int32_t)largeYuv888Size.width;
2208 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2209 (int32_t)largeYuv888Size.height;
2210 }
2211 break;
2212 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2213 case HAL_PIXEL_FORMAT_RAW16:
2214 case HAL_PIXEL_FORMAT_RAW10:
2215 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2216 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2217 isRawStreamRequested = true;
2218 break;
2219 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002220 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002221 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2222 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2223 break;
2224 }
2225 }
2226
2227 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2228 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2229 gCamCapability[mCameraId]->color_arrangement);
2230
2231 if (newStream->priv == NULL) {
2232 //New stream, construct channel
2233 switch (newStream->stream_type) {
2234 case CAMERA3_STREAM_INPUT:
2235 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2236 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2237 break;
2238 case CAMERA3_STREAM_BIDIRECTIONAL:
2239 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2240 GRALLOC_USAGE_HW_CAMERA_WRITE;
2241 break;
2242 case CAMERA3_STREAM_OUTPUT:
2243 /* For video encoding stream, set read/write rarely
2244 * flag so that they may be set to un-cached */
2245 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2246 newStream->usage |=
2247 (GRALLOC_USAGE_SW_READ_RARELY |
2248 GRALLOC_USAGE_SW_WRITE_RARELY |
2249 GRALLOC_USAGE_HW_CAMERA_WRITE);
2250 else if (IS_USAGE_ZSL(newStream->usage))
2251 {
2252 LOGD("ZSL usage flag skipping");
2253 }
2254 else if (newStream == zslStream
2255 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2256 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2257 } else
2258 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2259 break;
2260 default:
2261 LOGE("Invalid stream_type %d", newStream->stream_type);
2262 break;
2263 }
2264
2265 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2266 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2267 QCamera3ProcessingChannel *channel = NULL;
2268 switch (newStream->format) {
2269 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2270 if ((newStream->usage &
2271 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2272 (streamList->operation_mode ==
2273 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2274 ) {
2275 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2276 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002277 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002278 this,
2279 newStream,
2280 (cam_stream_type_t)
2281 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2283 mMetadataChannel,
2284 0); //heap buffers are not required for HFR video channel
2285 if (channel == NULL) {
2286 LOGE("allocation of channel failed");
2287 pthread_mutex_unlock(&mMutex);
2288 return -ENOMEM;
2289 }
2290 //channel->getNumBuffers() will return 0 here so use
 2291                         //MAX_INFLIGHT_HFR_REQUESTS
2292 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2293 newStream->priv = channel;
2294 LOGI("num video buffers in HFR mode: %d",
2295 MAX_INFLIGHT_HFR_REQUESTS);
2296 } else {
2297 /* Copy stream contents in HFR preview only case to create
2298 * dummy batch channel so that sensor streaming is in
2299 * HFR mode */
2300 if (!m_bIsVideo && (streamList->operation_mode ==
2301 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2302 mDummyBatchStream = *newStream;
2303 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 int bufferCount = MAX_INFLIGHT_REQUESTS;
2305 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2306 CAM_STREAM_TYPE_VIDEO) {
2307 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2308 bufferCount = MAX_VIDEO_BUFFERS;
2309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002310 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2311 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002312 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002313 this,
2314 newStream,
2315 (cam_stream_type_t)
2316 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2317 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2318 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002319 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 if (channel == NULL) {
2321 LOGE("allocation of channel failed");
2322 pthread_mutex_unlock(&mMutex);
2323 return -ENOMEM;
2324 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002325 /* disable UBWC for preview, though supported,
2326 * to take advantage of CPP duplication */
2327 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2328 (previewSize.width == (int32_t)videoWidth)&&
2329 (previewSize.height == (int32_t)videoHeight)){
2330 channel->setUBWCEnabled(false);
2331 }else {
2332 channel->setUBWCEnabled(true);
2333 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 newStream->max_buffers = channel->getNumBuffers();
2335 newStream->priv = channel;
2336 }
2337 break;
2338 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2339 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2340 mChannelHandle,
2341 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002342 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002343 this,
2344 newStream,
2345 (cam_stream_type_t)
2346 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2347 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2348 mMetadataChannel);
2349 if (channel == NULL) {
2350 LOGE("allocation of YUV channel failed");
2351 pthread_mutex_unlock(&mMutex);
2352 return -ENOMEM;
2353 }
2354 newStream->max_buffers = channel->getNumBuffers();
2355 newStream->priv = channel;
2356 break;
2357 }
2358 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2359 case HAL_PIXEL_FORMAT_RAW16:
2360 case HAL_PIXEL_FORMAT_RAW10:
2361 mRawChannel = new QCamera3RawChannel(
2362 mCameraHandle->camera_handle, mChannelHandle,
2363 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002364 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002365 this, newStream,
2366 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2367 mMetadataChannel,
2368 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2369 if (mRawChannel == NULL) {
2370 LOGE("allocation of raw channel failed");
2371 pthread_mutex_unlock(&mMutex);
2372 return -ENOMEM;
2373 }
2374 newStream->max_buffers = mRawChannel->getNumBuffers();
2375 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2376 break;
2377 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002378 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2379 mDepthChannel = new QCamera3DepthChannel(
2380 mCameraHandle->camera_handle, mChannelHandle,
2381 mCameraHandle->ops, NULL, NULL, &padding_info,
2382 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2383 mMetadataChannel);
2384 if (NULL == mDepthChannel) {
2385 LOGE("Allocation of depth channel failed");
2386 pthread_mutex_unlock(&mMutex);
2387 return NO_MEMORY;
2388 }
2389 newStream->priv = mDepthChannel;
2390 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2391 } else {
2392 // Max live snapshot inflight buffer is 1. This is to mitigate
2393 // frame drop issues for video snapshot. The more buffers being
2394 // allocated, the more frame drops there are.
2395 mPictureChannel = new QCamera3PicChannel(
2396 mCameraHandle->camera_handle, mChannelHandle,
2397 mCameraHandle->ops, captureResultCb,
2398 setBufferErrorStatus, &padding_info, this, newStream,
2399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2400 m_bIs4KVideo, isZsl, mMetadataChannel,
2401 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2402 if (mPictureChannel == NULL) {
2403 LOGE("allocation of channel failed");
2404 pthread_mutex_unlock(&mMutex);
2405 return -ENOMEM;
2406 }
2407 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2408 newStream->max_buffers = mPictureChannel->getNumBuffers();
2409 mPictureChannel->overrideYuvSize(
2410 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2411 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002413 break;
2414
2415 default:
2416 LOGE("not a supported format 0x%x", newStream->format);
2417 break;
2418 }
2419 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2420 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2421 } else {
2422 LOGE("Error, Unknown stream type");
2423 pthread_mutex_unlock(&mMutex);
2424 return -EINVAL;
2425 }
2426
2427 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2428 if (channel != NULL && channel->isUBWCEnabled()) {
2429 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002430 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2431 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002432 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2433 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2434 }
2435 }
2436
2437 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2438 it != mStreamInfo.end(); it++) {
2439 if ((*it)->stream == newStream) {
2440 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2441 break;
2442 }
2443 }
2444 } else {
2445 // Channel already exists for this stream
2446 // Do nothing for now
2447 }
2448 padding_info = gCamCapability[mCameraId]->padding_info;
2449
Emilian Peev7650c122017-01-19 08:24:33 -08002450         /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002451          * since there is no real stream associated with them
2452 */
Emilian Peev7650c122017-01-19 08:24:33 -08002453 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2454 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 }
2458
Thierry Strudel2896d122017-02-23 19:18:03 -08002459 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2460 onlyRaw = false;
2461 }
2462
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002463 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002464 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002465 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002466 cam_analysis_info_t analysisInfo;
2467 int32_t ret = NO_ERROR;
2468 ret = mCommon.getAnalysisInfo(
2469 FALSE,
2470 analysisFeatureMask,
2471 &analysisInfo);
2472 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002473 cam_color_filter_arrangement_t analysis_color_arrangement =
2474 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2475 CAM_FILTER_ARRANGEMENT_Y :
2476 gCamCapability[mCameraId]->color_arrangement);
2477 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2478 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002479 cam_dimension_t analysisDim;
2480 analysisDim = mCommon.getMatchingDimension(previewSize,
2481 analysisInfo.analysis_recommended_res);
2482
2483 mAnalysisChannel = new QCamera3SupportChannel(
2484 mCameraHandle->camera_handle,
2485 mChannelHandle,
2486 mCameraHandle->ops,
2487 &analysisInfo.analysis_padding_info,
2488 analysisFeatureMask,
2489 CAM_STREAM_TYPE_ANALYSIS,
2490 &analysisDim,
2491 (analysisInfo.analysis_format
2492 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2493 : CAM_FORMAT_YUV_420_NV21),
2494 analysisInfo.hw_analysis_supported,
2495 gCamCapability[mCameraId]->color_arrangement,
2496 this,
2497 0); // force buffer count to 0
2498 } else {
2499 LOGW("getAnalysisInfo failed, ret = %d", ret);
2500 }
2501 if (!mAnalysisChannel) {
2502 LOGW("Analysis channel cannot be created");
2503 }
2504 }
2505
Thierry Strudel3d639192016-09-09 11:52:26 -07002506 //RAW DUMP channel
2507 if (mEnableRawDump && isRawStreamRequested == false){
2508 cam_dimension_t rawDumpSize;
2509 rawDumpSize = getMaxRawSize(mCameraId);
2510 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2511 setPAAFSupport(rawDumpFeatureMask,
2512 CAM_STREAM_TYPE_RAW,
2513 gCamCapability[mCameraId]->color_arrangement);
2514 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2515 mChannelHandle,
2516 mCameraHandle->ops,
2517 rawDumpSize,
2518 &padding_info,
2519 this, rawDumpFeatureMask);
2520 if (!mRawDumpChannel) {
2521 LOGE("Raw Dump channel cannot be created");
2522 pthread_mutex_unlock(&mMutex);
2523 return -ENOMEM;
2524 }
2525 }
2526
Chien-Yu Chenee335912017-02-09 17:53:20 -08002527 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002528 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002529 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002530 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2531 "HDR+ RAW source channel is not created.",
2532 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002533 } else {
2534 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2535 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2536 setPAAFSupport(hdrPlusRawFeatureMask,
2537 CAM_STREAM_TYPE_RAW,
2538 gCamCapability[mCameraId]->color_arrangement);
2539 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2540 mChannelHandle,
2541 mCameraHandle->ops,
2542 rawSize,
2543 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002544 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002545 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002546 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002547 if (!mHdrPlusRawSrcChannel) {
2548 LOGE("HDR+ Raw Source channel cannot be created");
2549 pthread_mutex_unlock(&mMutex);
2550 return -ENOMEM;
2551 }
2552 }
2553 }
2554
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 if (mAnalysisChannel) {
2556 cam_analysis_info_t analysisInfo;
2557 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2558 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2559 CAM_STREAM_TYPE_ANALYSIS;
2560 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2561 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002562 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2564 &analysisInfo);
2565 if (rc != NO_ERROR) {
2566 LOGE("getAnalysisInfo failed, ret = %d", rc);
2567 pthread_mutex_unlock(&mMutex);
2568 return rc;
2569 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002570 cam_color_filter_arrangement_t analysis_color_arrangement =
2571 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2572 CAM_FILTER_ARRANGEMENT_Y :
2573 gCamCapability[mCameraId]->color_arrangement);
2574 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2575 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2576 analysis_color_arrangement);
2577
Thierry Strudel3d639192016-09-09 11:52:26 -07002578 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002579 mCommon.getMatchingDimension(previewSize,
2580 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 mStreamConfigInfo.num_streams++;
2582 }
2583
Thierry Strudel2896d122017-02-23 19:18:03 -08002584 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002585 cam_analysis_info_t supportInfo;
2586 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2587 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2588 setPAAFSupport(callbackFeatureMask,
2589 CAM_STREAM_TYPE_CALLBACK,
2590 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002591 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002592 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002593 if (ret != NO_ERROR) {
2594 /* Ignore the error for Mono camera
2595 * because the PAAF bit mask is only set
2596 * for CAM_STREAM_TYPE_ANALYSIS stream type
2597 */
2598 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2599 LOGW("getAnalysisInfo failed, ret = %d", ret);
2600 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002601 }
2602 mSupportChannel = new QCamera3SupportChannel(
2603 mCameraHandle->camera_handle,
2604 mChannelHandle,
2605 mCameraHandle->ops,
2606 &gCamCapability[mCameraId]->padding_info,
2607 callbackFeatureMask,
2608 CAM_STREAM_TYPE_CALLBACK,
2609 &QCamera3SupportChannel::kDim,
2610 CAM_FORMAT_YUV_420_NV21,
2611 supportInfo.hw_analysis_supported,
2612 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002613 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002614 if (!mSupportChannel) {
2615 LOGE("dummy channel cannot be created");
2616 pthread_mutex_unlock(&mMutex);
2617 return -ENOMEM;
2618 }
2619 }
2620
2621 if (mSupportChannel) {
2622 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2623 QCamera3SupportChannel::kDim;
2624 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2625 CAM_STREAM_TYPE_CALLBACK;
2626 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2627 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2628 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2629 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2630 gCamCapability[mCameraId]->color_arrangement);
2631 mStreamConfigInfo.num_streams++;
2632 }
2633
2634 if (mRawDumpChannel) {
2635 cam_dimension_t rawSize;
2636 rawSize = getMaxRawSize(mCameraId);
2637 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2638 rawSize;
2639 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2640 CAM_STREAM_TYPE_RAW;
2641 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2642 CAM_QCOM_FEATURE_NONE;
2643 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2644 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2645 gCamCapability[mCameraId]->color_arrangement);
2646 mStreamConfigInfo.num_streams++;
2647 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002648
2649 if (mHdrPlusRawSrcChannel) {
2650 cam_dimension_t rawSize;
2651 rawSize = getMaxRawSize(mCameraId);
2652 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2653 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2654 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2655 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2656 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2657 gCamCapability[mCameraId]->color_arrangement);
2658 mStreamConfigInfo.num_streams++;
2659 }
2660
Thierry Strudel3d639192016-09-09 11:52:26 -07002661 /* In HFR mode, if video stream is not added, create a dummy channel so that
 2662      * ISP can configure batch mode even for the preview-only case. This channel is
2663 * never 'start'ed (no stream-on), it is only 'initialized' */
2664 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2665 !m_bIsVideo) {
2666 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2667 setPAAFSupport(dummyFeatureMask,
2668 CAM_STREAM_TYPE_VIDEO,
2669 gCamCapability[mCameraId]->color_arrangement);
2670 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2671 mChannelHandle,
2672 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002673 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 this,
2675 &mDummyBatchStream,
2676 CAM_STREAM_TYPE_VIDEO,
2677 dummyFeatureMask,
2678 mMetadataChannel);
2679 if (NULL == mDummyBatchChannel) {
2680 LOGE("creation of mDummyBatchChannel failed."
2681 "Preview will use non-hfr sensor mode ");
2682 }
2683 }
2684 if (mDummyBatchChannel) {
2685 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2686 mDummyBatchStream.width;
2687 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2688 mDummyBatchStream.height;
2689 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2690 CAM_STREAM_TYPE_VIDEO;
2691 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2692 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2693 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2694 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2695 gCamCapability[mCameraId]->color_arrangement);
2696 mStreamConfigInfo.num_streams++;
2697 }
2698
2699 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2700 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002701 m_bIs4KVideo ? 0 :
2702 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002703
2704 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2705 for (pendingRequestIterator i = mPendingRequestsList.begin();
2706 i != mPendingRequestsList.end();) {
2707 i = erasePendingRequest(i);
2708 }
2709 mPendingFrameDropList.clear();
2710 // Initialize/Reset the pending buffers list
2711 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2712 req.mPendingBufferList.clear();
2713 }
2714 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2715
Thierry Strudel3d639192016-09-09 11:52:26 -07002716 mCurJpegMeta.clear();
 2717 // Get min frame duration for this stream configuration
2718 deriveMinFrameDuration();
2719
Chien-Yu Chenee335912017-02-09 17:53:20 -08002720 mFirstPreviewIntentSeen = false;
2721
 2722 // Disable HDR+ if it's enabled.
2723 disableHdrPlusModeLocked();
2724
Thierry Strudel3d639192016-09-09 11:52:26 -07002725 // Update state
2726 mState = CONFIGURED;
2727
2728 pthread_mutex_unlock(&mMutex);
2729
2730 return rc;
2731}
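
/* Illustrative sketch, not referenced by the HAL: every internal stream
 * registered above (callback, RAW dump, HDR+ RAW, dummy batch) is appended to
 * mStreamConfigInfo with the same four-step pattern. Assuming the structure is
 * a cam_stream_size_info_t (as the field names used above suggest), that
 * pattern could be captured by a hypothetical helper like this; the
 * setPAAFSupport() step is left as a comment because it depends on HAL state
 * (color arrangement). */
static inline void sketchAppendInternalStream(cam_stream_size_info_t &info,
        const cam_dimension_t &dim, cam_stream_type_t type,
        cam_feature_mask_t ppMask)
{
    info.stream_sizes[info.num_streams] = dim;
    info.type[info.num_streams] = type;
    info.postprocess_mask[info.num_streams] = ppMask;
    // setPAAFSupport(info.postprocess_mask[info.num_streams],
    //         info.type[info.num_streams], colorArrangement);
    info.num_streams++;
}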
2732
2733/*===========================================================================
2734 * FUNCTION : validateCaptureRequest
2735 *
2736 * DESCRIPTION: validate a capture request from camera service
2737 *
2738 * PARAMETERS :
2739 * @request : request from framework to process
2740 *
2741 * RETURN :
2742 *
2743 *==========================================================================*/
2744int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002745 camera3_capture_request_t *request,
2746 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002747{
2748 ssize_t idx = 0;
2749 const camera3_stream_buffer_t *b;
2750 CameraMetadata meta;
2751
2752 /* Sanity check the request */
2753 if (request == NULL) {
2754 LOGE("NULL capture request");
2755 return BAD_VALUE;
2756 }
2757
2758 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2759 /*settings cannot be null for the first request*/
2760 return BAD_VALUE;
2761 }
2762
2763 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002764 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2765 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002766 LOGE("Request %d: No output buffers provided!",
 2767 frameNumber);
2768 return BAD_VALUE;
2769 }
2770 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
 2771 LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
 2772 request->num_output_buffers, MAX_NUM_STREAMS);
2773 return BAD_VALUE;
2774 }
2775 if (request->input_buffer != NULL) {
2776 b = request->input_buffer;
2777 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2778 LOGE("Request %d: Buffer %ld: Status not OK!",
2779 frameNumber, (long)idx);
2780 return BAD_VALUE;
2781 }
2782 if (b->release_fence != -1) {
2783 LOGE("Request %d: Buffer %ld: Has a release fence!",
2784 frameNumber, (long)idx);
2785 return BAD_VALUE;
2786 }
2787 if (b->buffer == NULL) {
2788 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2789 frameNumber, (long)idx);
2790 return BAD_VALUE;
2791 }
2792 }
2793
2794 // Validate all buffers
2795 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002796 if (b == NULL) {
2797 return BAD_VALUE;
2798 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002799 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 QCamera3ProcessingChannel *channel =
2801 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2802 if (channel == NULL) {
2803 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2804 frameNumber, (long)idx);
2805 return BAD_VALUE;
2806 }
2807 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2808 LOGE("Request %d: Buffer %ld: Status not OK!",
2809 frameNumber, (long)idx);
2810 return BAD_VALUE;
2811 }
2812 if (b->release_fence != -1) {
2813 LOGE("Request %d: Buffer %ld: Has a release fence!",
2814 frameNumber, (long)idx);
2815 return BAD_VALUE;
2816 }
2817 if (b->buffer == NULL) {
2818 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2819 frameNumber, (long)idx);
2820 return BAD_VALUE;
2821 }
2822 if (*(b->buffer) == NULL) {
2823 LOGE("Request %d: Buffer %ld: NULL private handle!",
2824 frameNumber, (long)idx);
2825 return BAD_VALUE;
2826 }
2827 idx++;
2828 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 return NO_ERROR;
2831}
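
/* Illustrative sketch, not referenced by the HAL: the per-buffer rules
 * enforced in validateCaptureRequest() above reduce to a single predicate. A
 * buffer is acceptable only if its stream is configured (priv set during
 * configure_streams), its status is OK, it carries no release fence, and both
 * the buffer pointer and the handle it points to are non-NULL. */
static inline bool sketchIsOutputBufferValid(const camera3_stream_buffer_t &b)
{
    return (b.stream != NULL) && (b.stream->priv != NULL) &&
            (b.status == CAMERA3_BUFFER_STATUS_OK) &&
            (b.release_fence == -1) &&
            (b.buffer != NULL) && (*b.buffer != NULL);
}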
2832
2833/*===========================================================================
2834 * FUNCTION : deriveMinFrameDuration
2835 *
 2836 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2837 * on currently configured streams.
2838 *
2839 * PARAMETERS : NONE
2840 *
2841 * RETURN : NONE
2842 *
2843 *==========================================================================*/
2844void QCamera3HardwareInterface::deriveMinFrameDuration()
2845{
2846 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2847
2848 maxJpegDim = 0;
2849 maxProcessedDim = 0;
2850 maxRawDim = 0;
2851
2852 // Figure out maximum jpeg, processed, and raw dimensions
2853 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2854 it != mStreamInfo.end(); it++) {
2855
2856 // Input stream doesn't have valid stream_type
2857 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2858 continue;
2859
2860 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2861 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2862 if (dimension > maxJpegDim)
2863 maxJpegDim = dimension;
2864 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2865 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2866 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2867 if (dimension > maxRawDim)
2868 maxRawDim = dimension;
2869 } else {
2870 if (dimension > maxProcessedDim)
2871 maxProcessedDim = dimension;
2872 }
2873 }
2874
2875 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2876 MAX_SIZES_CNT);
2877
2878 //Assume all jpeg dimensions are in processed dimensions.
2879 if (maxJpegDim > maxProcessedDim)
2880 maxProcessedDim = maxJpegDim;
2881 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2882 if (maxProcessedDim > maxRawDim) {
2883 maxRawDim = INT32_MAX;
2884
2885 for (size_t i = 0; i < count; i++) {
2886 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2887 gCamCapability[mCameraId]->raw_dim[i].height;
2888 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2889 maxRawDim = dimension;
2890 }
2891 }
2892
2893 //Find minimum durations for processed, jpeg, and raw
2894 for (size_t i = 0; i < count; i++) {
2895 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2896 gCamCapability[mCameraId]->raw_dim[i].height) {
2897 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2898 break;
2899 }
2900 }
2901 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2902 for (size_t i = 0; i < count; i++) {
2903 if (maxProcessedDim ==
2904 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2905 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2906 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2907 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2908 break;
2909 }
2910 }
2911}
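
/* Illustrative sketch (hypothetical helper, not referenced by the HAL): the
 * raw size used for mMinRawFrameDuration above is the smallest advertised raw
 * size whose pixel count is at least the largest processed/JPEG size. With
 * plain arrays standing in for the gCamCapability fields: */
static inline int32_t sketchPickRawDim(const int32_t rawDims[], size_t count,
        int32_t maxProcessedDim)
{
    int32_t best = INT32_MAX;
    for (size_t i = 0; i < count; i++) {
        if (rawDims[i] >= maxProcessedDim && rawDims[i] < best) {
            best = rawDims[i];
        }
    }
    return best;  // INT32_MAX means no advertised raw size is large enough
}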
2912
2913/*===========================================================================
2914 * FUNCTION : getMinFrameDuration
2915 *
 2916 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
 2917 * derived for the currently configured streams and the current request configuration.
 2918 *
 2919 * PARAMETERS : @request: request sent by the frameworks
 2920 *
 2921 * RETURN : min frame duration for a particular request
2922 *
2923 *==========================================================================*/
2924int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2925{
2926 bool hasJpegStream = false;
2927 bool hasRawStream = false;
2928 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2929 const camera3_stream_t *stream = request->output_buffers[i].stream;
2930 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2931 hasJpegStream = true;
2932 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2933 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2934 stream->format == HAL_PIXEL_FORMAT_RAW16)
2935 hasRawStream = true;
2936 }
2937
2938 if (!hasJpegStream)
2939 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2940 else
2941 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2942}
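
/* Illustrative sketch, not referenced by the HAL: the duration returned by
 * getMinFrameDuration() is in nanoseconds and bounds the request rate, so a
 * client-side estimate of the maximum sustainable fps for a given request is
 * simply NSEC_PER_SEC divided by that duration. */
static inline double sketchMaxFpsForRequest(int64_t minFrameDurationNs)
{
    return (minFrameDurationNs > 0) ?
            ((double)NSEC_PER_SEC / (double)minFrameDurationNs) : 0.0;
}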
2943
2944/*===========================================================================
2945 * FUNCTION : handleBuffersDuringFlushLock
2946 *
2947 * DESCRIPTION: Account for buffers returned from back-end during flush
2948 * This function is executed while mMutex is held by the caller.
2949 *
2950 * PARAMETERS :
2951 * @buffer: image buffer for the callback
2952 *
2953 * RETURN :
2954 *==========================================================================*/
2955void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2956{
2957 bool buffer_found = false;
2958 for (List<PendingBuffersInRequest>::iterator req =
2959 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2960 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2961 for (List<PendingBufferInfo>::iterator i =
2962 req->mPendingBufferList.begin();
2963 i != req->mPendingBufferList.end(); i++) {
2964 if (i->buffer == buffer->buffer) {
2965 mPendingBuffersMap.numPendingBufsAtFlush--;
2966 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2967 buffer->buffer, req->frame_number,
2968 mPendingBuffersMap.numPendingBufsAtFlush);
2969 buffer_found = true;
2970 break;
2971 }
2972 }
2973 if (buffer_found) {
2974 break;
2975 }
2976 }
2977 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2978 //signal the flush()
2979 LOGD("All buffers returned to HAL. Continue flush");
2980 pthread_cond_signal(&mBuffersCond);
2981 }
2982}
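
/* Illustrative sketch (self-contained, not referenced by the HAL): the flush
 * accounting above is a plain counter protected by mMutex plus mBuffersCond.
 * handleBuffersDuringFlushLock() decrements the counter as buffers come back
 * and signals once it reaches zero; the waiting side in flush() has this
 * general shape, with local primitives standing in for the HAL members. */
static void sketchWaitForFlushDrain(pthread_mutex_t *lock,
        pthread_cond_t *cond, const uint32_t *pendingCount)
{
    pthread_mutex_lock(lock);
    while (*pendingCount != 0) {
        // Woken by pthread_cond_signal() from the buffer-return path
        pthread_cond_wait(cond, lock);
    }
    pthread_mutex_unlock(lock);
}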
2983
Thierry Strudel3d639192016-09-09 11:52:26 -07002984/*===========================================================================
2985 * FUNCTION : handleBatchMetadata
2986 *
2987 * DESCRIPTION: Handles metadata buffer callback in batch mode
2988 *
2989 * PARAMETERS : @metadata_buf: metadata buffer
2990 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2991 * the meta buf in this method
2992 *
2993 * RETURN :
2994 *
2995 *==========================================================================*/
2996void QCamera3HardwareInterface::handleBatchMetadata(
2997 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2998{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002999 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003000
3001 if (NULL == metadata_buf) {
3002 LOGE("metadata_buf is NULL");
3003 return;
3004 }
 3005 /* In batch mode, the metadata will contain the frame number and timestamp of
 3006 * the last frame in the batch. E.g. a batch containing buffers from requests
 3007 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
 3008 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3009 * multiple process_capture_results */
3010 metadata_buffer_t *metadata =
3011 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3012 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3013 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3014 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3015 uint32_t frame_number = 0, urgent_frame_number = 0;
3016 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3017 bool invalid_metadata = false;
3018 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3019 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003020 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003021
3022 int32_t *p_frame_number_valid =
3023 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3024 uint32_t *p_frame_number =
3025 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3026 int64_t *p_capture_time =
3027 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3028 int32_t *p_urgent_frame_number_valid =
3029 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3030 uint32_t *p_urgent_frame_number =
3031 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3032
3033 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3034 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3035 (NULL == p_urgent_frame_number)) {
3036 LOGE("Invalid metadata");
3037 invalid_metadata = true;
3038 } else {
3039 frame_number_valid = *p_frame_number_valid;
3040 last_frame_number = *p_frame_number;
3041 last_frame_capture_time = *p_capture_time;
3042 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3043 last_urgent_frame_number = *p_urgent_frame_number;
3044 }
3045
 3046 /* In batch mode, when no video buffers are requested, set_parms are sent
3047 * for every capture_request. The difference between consecutive urgent
3048 * frame numbers and frame numbers should be used to interpolate the
3049 * corresponding frame numbers and time stamps */
3050 pthread_mutex_lock(&mMutex);
3051 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003052 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3053 if(idx < 0) {
3054 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3055 last_urgent_frame_number);
3056 mState = ERROR;
3057 pthread_mutex_unlock(&mMutex);
3058 return;
3059 }
3060 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003061 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3062 first_urgent_frame_number;
3063
3064 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3065 urgent_frame_number_valid,
3066 first_urgent_frame_number, last_urgent_frame_number);
3067 }
3068
3069 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003070 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3071 if(idx < 0) {
3072 LOGE("Invalid frame number received: %d. Irrecoverable error",
3073 last_frame_number);
3074 mState = ERROR;
3075 pthread_mutex_unlock(&mMutex);
3076 return;
3077 }
3078 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003079 frameNumDiff = last_frame_number + 1 -
3080 first_frame_number;
3081 mPendingBatchMap.removeItem(last_frame_number);
3082
3083 LOGD("frm: valid: %d frm_num: %d - %d",
3084 frame_number_valid,
3085 first_frame_number, last_frame_number);
3086
3087 }
3088 pthread_mutex_unlock(&mMutex);
3089
3090 if (urgent_frame_number_valid || frame_number_valid) {
3091 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3092 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3093 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3094 urgentFrameNumDiff, last_urgent_frame_number);
3095 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3096 LOGE("frameNumDiff: %d frameNum: %d",
3097 frameNumDiff, last_frame_number);
3098 }
3099
3100 for (size_t i = 0; i < loopCount; i++) {
3101 /* handleMetadataWithLock is called even for invalid_metadata for
3102 * pipeline depth calculation */
3103 if (!invalid_metadata) {
3104 /* Infer frame number. Batch metadata contains frame number of the
3105 * last frame */
3106 if (urgent_frame_number_valid) {
3107 if (i < urgentFrameNumDiff) {
3108 urgent_frame_number =
3109 first_urgent_frame_number + i;
3110 LOGD("inferred urgent frame_number: %d",
3111 urgent_frame_number);
3112 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3113 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3114 } else {
3115 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3116 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3117 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3118 }
3119 }
3120
3121 /* Infer frame number. Batch metadata contains frame number of the
3122 * last frame */
3123 if (frame_number_valid) {
3124 if (i < frameNumDiff) {
3125 frame_number = first_frame_number + i;
3126 LOGD("inferred frame_number: %d", frame_number);
3127 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3128 CAM_INTF_META_FRAME_NUMBER, frame_number);
3129 } else {
3130 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3131 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3132 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3133 }
3134 }
3135
3136 if (last_frame_capture_time) {
3137 //Infer timestamp
3138 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003139 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003140 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003141 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003142 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3143 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3144 LOGD("batch capture_time: %lld, capture_time: %lld",
3145 last_frame_capture_time, capture_time);
3146 }
3147 }
3148 pthread_mutex_lock(&mMutex);
3149 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003150 false /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003151 (i == 0) /* first metadata in the batch */,
 3152 &is_metabuf_queued /* whether the metadata buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003153 pthread_mutex_unlock(&mMutex);
3154 }
3155
3156 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003157 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003158 mMetadataChannel->bufDone(metadata_buf);
3159 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003160 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003161 }
3162}
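
/* Illustrative sketch (hypothetical helper, not referenced by the HAL): in
 * batch mode the back-end reports only the last frame's number and timestamp,
 * so handleBatchMetadata() above reconstructs entry i of a batch of size
 * batchSize as frame_number = first + i, with the timestamp interpolated from
 * the configured HFR fps as sketched here. */
static inline int64_t sketchBatchCaptureTime(int64_t lastCaptureTimeNs,
        size_t batchSize, size_t i, double hfrFps)
{
    int64_t firstCaptureTimeNs = lastCaptureTimeNs -
            (int64_t)(((batchSize - 1) * NSEC_PER_SEC) / hfrFps);
    return firstCaptureTimeNs + (int64_t)((i * NSEC_PER_SEC) / hfrFps);
}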
3163
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003164void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3165 camera3_error_msg_code_t errorCode)
3166{
3167 camera3_notify_msg_t notify_msg;
3168 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3169 notify_msg.type = CAMERA3_MSG_ERROR;
3170 notify_msg.message.error.error_code = errorCode;
3171 notify_msg.message.error.error_stream = NULL;
3172 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003173 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003174
3175 return;
3176}
Thierry Strudel3d639192016-09-09 11:52:26 -07003177/*===========================================================================
3178 * FUNCTION : handleMetadataWithLock
3179 *
3180 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3181 *
3182 * PARAMETERS : @metadata_buf: metadata buffer
3183 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3184 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003185 * @firstMetadataInBatch: Boolean to indicate whether this is the
3186 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003187 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3188 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003189 *
3190 * RETURN :
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003194 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003195 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003196{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003197 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003198 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3199 //during flush do not send metadata from this thread
3200 LOGD("not sending metadata during flush or when mState is error");
3201 if (free_and_bufdone_meta_buf) {
3202 mMetadataChannel->bufDone(metadata_buf);
3203 free(metadata_buf);
3204 }
3205 return;
3206 }
3207
3208 //not in flush
3209 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3210 int32_t frame_number_valid, urgent_frame_number_valid;
3211 uint32_t frame_number, urgent_frame_number;
3212 int64_t capture_time;
3213 nsecs_t currentSysTime;
3214
3215 int32_t *p_frame_number_valid =
3216 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3217 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3218 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3219 int32_t *p_urgent_frame_number_valid =
3220 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3221 uint32_t *p_urgent_frame_number =
3222 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3223 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3224 metadata) {
3225 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3226 *p_frame_number_valid, *p_frame_number);
3227 }
3228
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003229 camera_metadata_t *resultMetadata = nullptr;
3230
Thierry Strudel3d639192016-09-09 11:52:26 -07003231 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3232 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3233 LOGE("Invalid metadata");
3234 if (free_and_bufdone_meta_buf) {
3235 mMetadataChannel->bufDone(metadata_buf);
3236 free(metadata_buf);
3237 }
3238 goto done_metadata;
3239 }
3240 frame_number_valid = *p_frame_number_valid;
3241 frame_number = *p_frame_number;
3242 capture_time = *p_capture_time;
3243 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3244 urgent_frame_number = *p_urgent_frame_number;
3245 currentSysTime = systemTime(CLOCK_MONOTONIC);
3246
3247 // Detect if buffers from any requests are overdue
3248 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003249 int64_t timeout;
3250 {
3251 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3252 // If there is a pending HDR+ request, the following requests may be blocked until the
3253 // HDR+ request is done. So allow a longer timeout.
3254 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3255 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3256 }
3257
3258 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003259 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003260 assert(missed.stream->priv);
3261 if (missed.stream->priv) {
3262 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3263 assert(ch->mStreams[0]);
3264 if (ch->mStreams[0]) {
3265 LOGE("Cancel missing frame = %d, buffer = %p,"
3266 "stream type = %d, stream format = %d",
3267 req.frame_number, missed.buffer,
3268 ch->mStreams[0]->getMyType(), missed.stream->format);
3269 ch->timeoutFrame(req.frame_number);
3270 }
3271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 }
3273 }
3274 }
3275 //Partial result on process_capture_result for timestamp
3276 if (urgent_frame_number_valid) {
3277 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3278 urgent_frame_number, capture_time);
3279
 3280 //Received an urgent frame number, handle it
3281 //using partial results
3282 for (pendingRequestIterator i =
3283 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3284 LOGD("Iterator Frame = %d urgent frame = %d",
3285 i->frame_number, urgent_frame_number);
3286
3287 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3288 (i->partial_result_cnt == 0)) {
3289 LOGE("Error: HAL missed urgent metadata for frame number %d",
3290 i->frame_number);
3291 }
3292
3293 if (i->frame_number == urgent_frame_number &&
3294 i->bUrgentReceived == 0) {
3295
3296 camera3_capture_result_t result;
3297 memset(&result, 0, sizeof(camera3_capture_result_t));
3298
3299 i->partial_result_cnt++;
3300 i->bUrgentReceived = 1;
3301 // Extract 3A metadata
3302 result.result =
3303 translateCbUrgentMetadataToResultMetadata(metadata);
3304 // Populate metadata result
3305 result.frame_number = urgent_frame_number;
3306 result.num_output_buffers = 0;
3307 result.output_buffers = NULL;
3308 result.partial_result = i->partial_result_cnt;
3309
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003310 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003311 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003312 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003313 result.partial_result == PARTIAL_RESULT_COUNT);
3314 }
3315
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003316 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003317 LOGD("urgent frame_number = %u, capture_time = %lld",
3318 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003319 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3320 // Instant AEC settled for this frame.
3321 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3322 mInstantAECSettledFrameNumber = urgent_frame_number;
3323 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003324 free_camera_metadata((camera_metadata_t *)result.result);
3325 break;
3326 }
3327 }
3328 }
3329
3330 if (!frame_number_valid) {
3331 LOGD("Not a valid normal frame number, used as SOF only");
3332 if (free_and_bufdone_meta_buf) {
3333 mMetadataChannel->bufDone(metadata_buf);
3334 free(metadata_buf);
3335 }
3336 goto done_metadata;
3337 }
3338 LOGH("valid frame_number = %u, capture_time = %lld",
3339 frame_number, capture_time);
3340
Emilian Peev7650c122017-01-19 08:24:33 -08003341 if (metadata->is_depth_data_valid) {
3342 handleDepthDataLocked(metadata->depth_data, frame_number);
3343 }
3344
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003345 // Check whether any stream buffer corresponding to this frame was dropped or not.
 3346 // If dropped, then send ERROR_BUFFER for the corresponding stream.
 3347 // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3348 for (auto & pendingRequest : mPendingRequestsList) {
3349 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3350 mInstantAECSettledFrameNumber)) {
3351 camera3_notify_msg_t notify_msg = {};
3352 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003353 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003354 QCamera3ProcessingChannel *channel =
3355 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003356 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003357 if (p_cam_frame_drop) {
3358 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003359 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003360 // Got the stream ID for drop frame.
3361 dropFrame = true;
3362 break;
3363 }
3364 }
3365 } else {
3366 // This is instant AEC case.
3367 // For instant AEC drop the stream untill AEC is settled.
3368 dropFrame = true;
3369 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003370
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003371 if (dropFrame) {
3372 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3373 if (p_cam_frame_drop) {
3374 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003375 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003376 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003377 } else {
3378 // For instant AEC, inform frame drop and frame number
3379 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3380 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003381 pendingRequest.frame_number, streamID,
3382 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003383 }
3384 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003385 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003386 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003388 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003389 if (p_cam_frame_drop) {
3390 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003391 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003392 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003393 } else {
3394 // For instant AEC, inform frame drop and frame number
3395 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3396 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003397 pendingRequest.frame_number, streamID,
3398 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003399 }
3400 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003401 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003402 PendingFrameDrop.stream_ID = streamID;
3403 // Add the Frame drop info to mPendingFrameDropList
3404 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003405 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003406 }
3407 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003408 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003409
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003410 for (auto & pendingRequest : mPendingRequestsList) {
3411 // Find the pending request with the frame number.
3412 if (pendingRequest.frame_number == frame_number) {
3413 // Update the sensor timestamp.
3414 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003415
Thierry Strudel3d639192016-09-09 11:52:26 -07003416
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003417 /* Set the timestamp in display metadata so that clients aware of
3418 private_handle such as VT can use this un-modified timestamps.
3419 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003420 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003421
Thierry Strudel3d639192016-09-09 11:52:26 -07003422 // Find channel requiring metadata, meaning internal offline postprocess
3423 // is needed.
3424 //TODO: for now, we don't support two streams requiring metadata at the same time.
 3425 // (because we are not making copies, and the metadata buffer is not reference counted).
3426 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003427 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3428 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003429 if (iter->need_metadata) {
3430 internalPproc = true;
3431 QCamera3ProcessingChannel *channel =
3432 (QCamera3ProcessingChannel *)iter->stream->priv;
3433 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003434 if(p_is_metabuf_queued != NULL) {
3435 *p_is_metabuf_queued = true;
3436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003437 break;
3438 }
3439 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003440 for (auto itr = pendingRequest.internalRequestList.begin();
3441 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003442 if (itr->need_metadata) {
3443 internalPproc = true;
3444 QCamera3ProcessingChannel *channel =
3445 (QCamera3ProcessingChannel *)itr->stream->priv;
3446 channel->queueReprocMetadata(metadata_buf);
3447 break;
3448 }
3449 }
3450
Thierry Strudel54dc9782017-02-15 12:12:10 -08003451 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003452 resultMetadata = translateFromHalMetadata(metadata,
3453 pendingRequest.timestamp, pendingRequest.request_id,
3454 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3455 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003456 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003457 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003458 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003459 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003460 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003461 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003463 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003464
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003465 if (pendingRequest.blob_request) {
3466 //Dump tuning metadata if enabled and available
3467 char prop[PROPERTY_VALUE_MAX];
3468 memset(prop, 0, sizeof(prop));
3469 property_get("persist.camera.dumpmetadata", prop, "0");
3470 int32_t enabled = atoi(prop);
3471 if (enabled && metadata->is_tuning_params_valid) {
3472 dumpMetadataToFile(metadata->tuning_params,
3473 mMetaFrameCount,
3474 enabled,
3475 "Snapshot",
3476 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003477 }
3478 }
3479
3480 if (!internalPproc) {
3481 LOGD("couldn't find need_metadata for this metadata");
3482 // Return metadata buffer
3483 if (free_and_bufdone_meta_buf) {
3484 mMetadataChannel->bufDone(metadata_buf);
3485 free(metadata_buf);
3486 }
3487 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003488
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003489 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003490 }
3491 }
3492
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003493 // Try to send out shutter callbacks and capture results.
3494 handlePendingResultsWithLock(frame_number, resultMetadata);
3495 return;
3496
Thierry Strudel3d639192016-09-09 11:52:26 -07003497done_metadata:
3498 for (pendingRequestIterator i = mPendingRequestsList.begin();
3499 i != mPendingRequestsList.end() ;i++) {
3500 i->pipeline_depth++;
3501 }
3502 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3503 unblockRequestIfNecessary();
3504}
3505
3506/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003507 * FUNCTION : handleDepthDataLocked
3508 *
3509 * DESCRIPTION: Handles incoming depth data
3510 *
3511 * PARAMETERS : @depthData : Depth data
3512 * @frameNumber: Frame number of the incoming depth data
3513 *
3514 * RETURN :
3515 *
3516 *==========================================================================*/
3517void QCamera3HardwareInterface::handleDepthDataLocked(
3518 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3519 uint32_t currentFrameNumber;
3520 buffer_handle_t *depthBuffer;
3521
3522 if (nullptr == mDepthChannel) {
3523 LOGE("Depth channel not present!");
3524 return;
3525 }
3526
3527 camera3_stream_buffer_t resultBuffer =
3528 {.acquire_fence = -1,
3529 .release_fence = -1,
3530 .status = CAMERA3_BUFFER_STATUS_OK,
3531 .buffer = nullptr,
3532 .stream = mDepthChannel->getStream()};
3533 camera3_capture_result_t result =
3534 {.result = nullptr,
3535 .num_output_buffers = 1,
3536 .output_buffers = &resultBuffer,
3537 .partial_result = 0,
3538 .frame_number = 0};
3539
3540 do {
3541 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3542 if (nullptr == depthBuffer) {
3543 break;
3544 }
3545
3546 result.frame_number = currentFrameNumber;
3547 resultBuffer.buffer = depthBuffer;
3548 if (currentFrameNumber == frameNumber) {
3549 int32_t rc = mDepthChannel->populateDepthData(depthData,
3550 frameNumber);
3551 if (NO_ERROR != rc) {
3552 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3553 } else {
3554 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3555 }
3556 } else if (currentFrameNumber > frameNumber) {
3557 break;
3558 } else {
3559 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3560 {{currentFrameNumber, mDepthChannel->getStream(),
3561 CAMERA3_MSG_ERROR_BUFFER}}};
3562 orchestrateNotify(&notify_msg);
3563
3564 LOGE("Depth buffer for frame number: %d is missing "
3565 "returning back!", currentFrameNumber);
3566 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3567 }
3568 mDepthChannel->unmapBuffer(currentFrameNumber);
3569
3570 orchestrateResult(&result);
3571 } while (currentFrameNumber < frameNumber);
3572}
3573
3574/*===========================================================================
3575 * FUNCTION : notifyErrorFoPendingDepthData
3576 *
3577 * DESCRIPTION: Returns error for any pending depth buffers
3578 *
3579 * PARAMETERS : depthCh - depth channel that needs to get flushed
3580 *
3581 * RETURN :
3582 *
3583 *==========================================================================*/
3584void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3585 QCamera3DepthChannel *depthCh) {
3586 uint32_t currentFrameNumber;
3587 buffer_handle_t *depthBuffer;
3588
3589 if (nullptr == depthCh) {
3590 return;
3591 }
3592
3593 camera3_notify_msg_t notify_msg =
3594 {.type = CAMERA3_MSG_ERROR,
3595 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3596 camera3_stream_buffer_t resultBuffer =
3597 {.acquire_fence = -1,
3598 .release_fence = -1,
3599 .buffer = nullptr,
3600 .stream = depthCh->getStream(),
3601 .status = CAMERA3_BUFFER_STATUS_ERROR};
3602 camera3_capture_result_t result =
3603 {.result = nullptr,
3604 .frame_number = 0,
3605 .num_output_buffers = 1,
3606 .partial_result = 0,
3607 .output_buffers = &resultBuffer};
3608
3609 while (nullptr !=
3610 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3611 depthCh->unmapBuffer(currentFrameNumber);
3612
3613 notify_msg.message.error.frame_number = currentFrameNumber;
3614 orchestrateNotify(&notify_msg);
3615
3616 resultBuffer.buffer = depthBuffer;
3617 result.frame_number = currentFrameNumber;
3618 orchestrateResult(&result);
3619 };
3620}
3621
3622/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003623 * FUNCTION : hdrPlusPerfLock
3624 *
3625 * DESCRIPTION: perf lock for HDR+ using custom intent
3626 *
3627 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3628 *
3629 * RETURN : None
3630 *
3631 *==========================================================================*/
3632void QCamera3HardwareInterface::hdrPlusPerfLock(
3633 mm_camera_super_buf_t *metadata_buf)
3634{
3635 if (NULL == metadata_buf) {
3636 LOGE("metadata_buf is NULL");
3637 return;
3638 }
3639 metadata_buffer_t *metadata =
3640 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3641 int32_t *p_frame_number_valid =
3642 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3643 uint32_t *p_frame_number =
3644 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3645
3646 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3647 LOGE("%s: Invalid metadata", __func__);
3648 return;
3649 }
3650
3651 //acquire perf lock for 5 sec after the last HDR frame is captured
3652 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3653 if ((p_frame_number != NULL) &&
3654 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003655 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003656 }
3657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003658}
3659
3660/*===========================================================================
3661 * FUNCTION : handleInputBufferWithLock
3662 *
3663 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3664 *
3665 * PARAMETERS : @frame_number: frame number of the input buffer
3666 *
3667 * RETURN :
3668 *
3669 *==========================================================================*/
3670void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3671{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003672 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003673 pendingRequestIterator i = mPendingRequestsList.begin();
3674 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3675 i++;
3676 }
3677 if (i != mPendingRequestsList.end() && i->input_buffer) {
3678 //found the right request
3679 if (!i->shutter_notified) {
3680 CameraMetadata settings;
3681 camera3_notify_msg_t notify_msg;
3682 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3683 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3684 if(i->settings) {
3685 settings = i->settings;
3686 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3687 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3688 } else {
3689 LOGE("No timestamp in input settings! Using current one.");
3690 }
3691 } else {
3692 LOGE("Input settings missing!");
3693 }
3694
3695 notify_msg.type = CAMERA3_MSG_SHUTTER;
3696 notify_msg.message.shutter.frame_number = frame_number;
3697 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003698 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003699 i->shutter_notified = true;
3700 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3701 i->frame_number, notify_msg.message.shutter.timestamp);
3702 }
3703
3704 if (i->input_buffer->release_fence != -1) {
3705 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3706 close(i->input_buffer->release_fence);
3707 if (rc != OK) {
3708 LOGE("input buffer sync wait failed %d", rc);
3709 }
3710 }
3711
3712 camera3_capture_result result;
3713 memset(&result, 0, sizeof(camera3_capture_result));
3714 result.frame_number = frame_number;
3715 result.result = i->settings;
3716 result.input_buffer = i->input_buffer;
3717 result.partial_result = PARTIAL_RESULT_COUNT;
3718
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003719 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003720 LOGD("Input request metadata and input buffer frame_number = %u",
3721 i->frame_number);
3722 i = erasePendingRequest(i);
3723 } else {
3724 LOGE("Could not find input request for frame number %d", frame_number);
3725 }
3726}
3727
3728/*===========================================================================
3729 * FUNCTION : handleBufferWithLock
3730 *
3731 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3732 *
3733 * PARAMETERS : @buffer: image buffer for the callback
3734 * @frame_number: frame number of the image buffer
3735 *
3736 * RETURN :
3737 *
3738 *==========================================================================*/
3739void QCamera3HardwareInterface::handleBufferWithLock(
3740 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3741{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003742 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003743
3744 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3745 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3746 }
3747
Thierry Strudel3d639192016-09-09 11:52:26 -07003748 /* Nothing to be done during error state */
3749 if ((ERROR == mState) || (DEINIT == mState)) {
3750 return;
3751 }
3752 if (mFlushPerf) {
3753 handleBuffersDuringFlushLock(buffer);
3754 return;
3755 }
3756 //not in flush
3757 // If the frame number doesn't exist in the pending request list,
3758 // directly send the buffer to the frameworks, and update pending buffers map
3759 // Otherwise, book-keep the buffer.
3760 pendingRequestIterator i = mPendingRequestsList.begin();
3761 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3762 i++;
3763 }
3764 if (i == mPendingRequestsList.end()) {
 3765 // Verify that all pending requests' frame_numbers are greater
3766 for (pendingRequestIterator j = mPendingRequestsList.begin();
3767 j != mPendingRequestsList.end(); j++) {
3768 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3769 LOGW("Error: pending live frame number %d is smaller than %d",
3770 j->frame_number, frame_number);
3771 }
3772 }
3773 camera3_capture_result_t result;
3774 memset(&result, 0, sizeof(camera3_capture_result_t));
3775 result.result = NULL;
3776 result.frame_number = frame_number;
3777 result.num_output_buffers = 1;
3778 result.partial_result = 0;
3779 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3780 m != mPendingFrameDropList.end(); m++) {
3781 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3783 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3784 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3785 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3786 frame_number, streamID);
3787 m = mPendingFrameDropList.erase(m);
3788 break;
3789 }
3790 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003791 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003792 result.output_buffers = buffer;
3793 LOGH("result frame_number = %d, buffer = %p",
3794 frame_number, buffer->buffer);
3795
3796 mPendingBuffersMap.removeBuf(buffer->buffer);
3797
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003798 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003799 } else {
3800 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003801 if (i->input_buffer->release_fence != -1) {
3802 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3803 close(i->input_buffer->release_fence);
3804 if (rc != OK) {
3805 LOGE("input buffer sync wait failed %d", rc);
3806 }
3807 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003809
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 // Put buffer into the pending request
3811 for (auto &requestedBuffer : i->buffers) {
3812 if (requestedBuffer.stream == buffer->stream) {
3813 if (requestedBuffer.buffer != nullptr) {
3814 LOGE("Error: buffer is already set");
3815 } else {
3816 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3817 sizeof(camera3_stream_buffer_t));
3818 *(requestedBuffer.buffer) = *buffer;
3819 LOGH("cache buffer %p at result frame_number %u",
3820 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003821 }
3822 }
3823 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824
3825 if (i->input_buffer) {
3826 // For a reprocessing request, try to send out shutter callback and result metadata.
3827 handlePendingResultsWithLock(frame_number, nullptr);
3828 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003829 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003830
3831 if (mPreviewStarted == false) {
3832 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3833 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3834 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3835 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3836 mPreviewStarted = true;
3837
3838 // Set power hint for preview
3839 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3840 }
3841 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003842}
3843
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3845 const camera_metadata_t *resultMetadata)
3846{
3847 // Find the pending request for this result metadata.
3848 auto requestIter = mPendingRequestsList.begin();
3849 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3850 requestIter++;
3851 }
3852
3853 if (requestIter == mPendingRequestsList.end()) {
3854 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3855 return;
3856 }
3857
3858 // Update the result metadata
3859 requestIter->resultMetadata = resultMetadata;
3860
3861 // Check what type of request this is.
3862 bool liveRequest = false;
3863 if (requestIter->hdrplus) {
3864 // HDR+ request doesn't have partial results.
3865 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3866 } else if (requestIter->input_buffer != nullptr) {
3867 // Reprocessing request result is the same as settings.
3868 requestIter->resultMetadata = requestIter->settings;
3869 // Reprocessing request doesn't have partial results.
3870 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3871 } else {
3872 liveRequest = true;
3873 requestIter->partial_result_cnt++;
3874 mPendingLiveRequest--;
3875
3876 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003877 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3878 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003879 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3880 }
3881 }
3882
3883 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3884 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3885 bool readyToSend = true;
3886
3887 // Iterate through the pending requests to send out shutter callbacks and results that are
3888 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3889 // live requests that don't have result metadata yet.
3890 auto iter = mPendingRequestsList.begin();
3891 while (iter != mPendingRequestsList.end()) {
3892 // Check if current pending request is ready. If it's not ready, the following pending
3893 // requests are also not ready.
3894 if (readyToSend && iter->resultMetadata == nullptr) {
3895 readyToSend = false;
3896 }
3897
3898 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3899
3900 std::vector<camera3_stream_buffer_t> outputBuffers;
3901
3902 camera3_capture_result_t result = {};
3903 result.frame_number = iter->frame_number;
3904 result.result = iter->resultMetadata;
3905 result.partial_result = iter->partial_result_cnt;
3906
3907 // If this pending buffer has result metadata, we may be able to send out shutter callback
3908 // and result metadata.
3909 if (iter->resultMetadata != nullptr) {
3910 if (!readyToSend) {
3911 // If any of the previous pending request is not ready, this pending request is
3912 // also not ready to send in order to keep shutter callbacks and result metadata
3913 // in order.
3914 iter++;
3915 continue;
3916 }
3917
3918 // Invoke shutter callback if not yet.
3919 if (!iter->shutter_notified) {
3920 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3921
3922 // Find the timestamp in HDR+ result metadata
3923 camera_metadata_ro_entry_t entry;
3924 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3925 ANDROID_SENSOR_TIMESTAMP, &entry);
3926 if (res != OK) {
3927 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3928 __FUNCTION__, iter->frame_number, strerror(-res), res);
3929 } else {
3930 timestamp = entry.data.i64[0];
3931 }
3932
3933 camera3_notify_msg_t notify_msg = {};
3934 notify_msg.type = CAMERA3_MSG_SHUTTER;
3935 notify_msg.message.shutter.frame_number = iter->frame_number;
3936 notify_msg.message.shutter.timestamp = timestamp;
3937 orchestrateNotify(&notify_msg);
3938 iter->shutter_notified = true;
3939 }
3940
3941 result.input_buffer = iter->input_buffer;
3942
3943 // Prepare output buffer array
3944 for (auto bufferInfoIter = iter->buffers.begin();
3945 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3946 if (bufferInfoIter->buffer != nullptr) {
3947
3948 QCamera3Channel *channel =
3949 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3950 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3951
3952 // Check if this buffer is a dropped frame.
3953 auto frameDropIter = mPendingFrameDropList.begin();
3954 while (frameDropIter != mPendingFrameDropList.end()) {
3955 if((frameDropIter->stream_ID == streamID) &&
3956 (frameDropIter->frame_number == frameNumber)) {
3957 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3958 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3959 streamID);
3960 mPendingFrameDropList.erase(frameDropIter);
3961 break;
3962 } else {
3963 frameDropIter++;
3964 }
3965 }
3966
3967 // Check buffer error status
3968 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3969 bufferInfoIter->buffer->buffer);
3970 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3971
3972 outputBuffers.push_back(*(bufferInfoIter->buffer));
3973 free(bufferInfoIter->buffer);
3974 bufferInfoIter->buffer = NULL;
3975 }
3976 }
3977
3978 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3979 result.num_output_buffers = outputBuffers.size();
3980 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3981 // If the result metadata belongs to a live request, notify errors for previous pending
3982 // live requests.
3983 mPendingLiveRequest--;
3984
3985 CameraMetadata dummyMetadata;
3986 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3987 result.result = dummyMetadata.release();
3988
3989 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3990 } else {
3991 iter++;
3992 continue;
3993 }
3994
3995 orchestrateResult(&result);
3996
3997 // For reprocessing, result metadata is the same as settings so do not free it here to
3998 // avoid double free.
3999 if (result.result != iter->settings) {
4000 free_camera_metadata((camera_metadata_t *)result.result);
4001 }
4002 iter->resultMetadata = nullptr;
4003 iter = erasePendingRequest(iter);
4004 }
4005
4006 if (liveRequest) {
4007 for (auto &iter : mPendingRequestsList) {
4008 // Increment pipeline depth for the following pending requests.
4009 if (iter.frame_number > frameNumber) {
4010 iter.pipeline_depth++;
4011 }
4012 }
4013 }
4014
4015 unblockRequestIfNecessary();
4016}
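
/* Illustrative sketch (hypothetical helper, not referenced by the HAL):
 * handlePendingResultsWithLock() above relies on mPendingRequestsList being
 * ordered by increasing frame number and only releases results up to the
 * first entry that still lacks result metadata, which keeps shutter callbacks
 * and capture results in order. The gating rule in isolation: */
static size_t sketchCountReadyResults(const std::vector<bool> &hasResultMetadata)
{
    size_t ready = 0;
    while (ready < hasResultMetadata.size() && hasResultMetadata[ready]) {
        ready++;
    }
    return ready;  // entries [0, ready) may be sent to the framework
}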
4017
Thierry Strudel3d639192016-09-09 11:52:26 -07004018/*===========================================================================
4019 * FUNCTION : unblockRequestIfNecessary
4020 *
4021 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4022 * that mMutex is held when this function is called.
4023 *
4024 * PARAMETERS :
4025 *
4026 * RETURN :
4027 *
4028 *==========================================================================*/
4029void QCamera3HardwareInterface::unblockRequestIfNecessary()
4030{
4031 // Unblock process_capture_request
4032 pthread_cond_signal(&mRequestCond);
4033}
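/*===========================================================================
 * Illustrative sketch (simplified, not the exact code in this file) of how
 * the signal above is expected to pair with the wait in
 * process_capture_request. The wait predicate below is simplified;
 * mPendingLiveRequest and mMaxInFlightRequests mirror members used
 * elsewhere in this file.
 *
 *     pthread_mutex_lock(&mMutex);
 *     while (mPendingLiveRequest >= mMaxInFlightRequests) {
 *         // Block until a result frees an in-flight slot.
 *         pthread_cond_wait(&mRequestCond, &mMutex);
 *     }
 *     // ... queue the new capture request ...
 *     pthread_mutex_unlock(&mMutex);
 *
 * Because unblockRequestIfNecessary() is called with mMutex held, the
 * signal cannot race with the predicate check above.
 *==========================================================================*/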
4034
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004035/*===========================================================================
4036 * FUNCTION : isHdrSnapshotRequest
4037 *
4038 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4039 *
4040 * PARAMETERS : camera3 request structure
4041 *
4042 * RETURN : boolean decision variable
4043 *
4044 *==========================================================================*/
4045bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4046{
4047 if (request == NULL) {
4048 LOGE("Invalid request handle");
4049 assert(0);
4050 return false;
4051 }
4052
4053 if (!mForceHdrSnapshot) {
4054 CameraMetadata frame_settings;
4055 frame_settings = request->settings;
4056
4057 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4058 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4059 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4060 return false;
4061 }
4062 } else {
4063 return false;
4064 }
4065
4066 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4067 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4068 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4069 return false;
4070 }
4071 } else {
4072 return false;
4073 }
4074 }
4075
4076 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4077 if (request->output_buffers[i].stream->format
4078 == HAL_PIXEL_FORMAT_BLOB) {
4079 return true;
4080 }
4081 }
4082
4083 return false;
4084}
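/*===========================================================================
 * Illustrative sketch: settings that the checks above classify as an HDR
 * snapshot when mForceHdrSnapshot is not set. Stream setup is elided; the
 * request must also carry at least one HAL_PIXEL_FORMAT_BLOB output buffer.
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *
 *     camera3_capture_request_t request = {};
 *     request.settings = settings.getAndLock();
 *     // ... attach output_buffers including a BLOB (JPEG) stream ...
 *     bool hdr = isHdrSnapshotRequest(&request);   // true for this setup
 *==========================================================================*/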
4085/*===========================================================================
4086 * FUNCTION : orchestrateRequest
4087 *
4088 * DESCRIPTION: Orchestrates a capture request from camera service
4089 *
4090 * PARAMETERS :
4091 * @request : request from framework to process
4092 *
4093 * RETURN : Error status codes
4094 *
4095 *==========================================================================*/
4096int32_t QCamera3HardwareInterface::orchestrateRequest(
4097 camera3_capture_request_t *request)
4098{
4099
4100 uint32_t originalFrameNumber = request->frame_number;
4101 uint32_t originalOutputCount = request->num_output_buffers;
4102 const camera_metadata_t *original_settings = request->settings;
4103 List<InternalRequest> internallyRequestedStreams;
4104 List<InternalRequest> emptyInternalList;
4105
4106 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4107 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4108 uint32_t internalFrameNumber;
4109 CameraMetadata modified_meta;
4110
4111
4112 /* Add Blob channel to list of internally requested streams */
4113 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4114 if (request->output_buffers[i].stream->format
4115 == HAL_PIXEL_FORMAT_BLOB) {
4116 InternalRequest streamRequested;
4117 streamRequested.meteringOnly = 1;
4118 streamRequested.need_metadata = 0;
4119 streamRequested.stream = request->output_buffers[i].stream;
4120 internallyRequestedStreams.push_back(streamRequested);
4121 }
4122 }
4123 request->num_output_buffers = 0;
4124 auto itr = internallyRequestedStreams.begin();
4125
4126 /* Modify setting to set compensation */
4127 modified_meta = request->settings;
4128 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4129 uint8_t aeLock = 1;
4130 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4131 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4132 camera_metadata_t *modified_settings = modified_meta.release();
4133 request->settings = modified_settings;
4134
4135 /* Capture Settling & -2x frame */
4136 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4137 request->frame_number = internalFrameNumber;
4138 processCaptureRequest(request, internallyRequestedStreams);
4139
4140 request->num_output_buffers = originalOutputCount;
4141 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4142 request->frame_number = internalFrameNumber;
4143 processCaptureRequest(request, emptyInternalList);
4144 request->num_output_buffers = 0;
4145
4146 modified_meta = modified_settings;
4147 expCompensation = 0;
4148 aeLock = 1;
4149 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4150 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4151 modified_settings = modified_meta.release();
4152 request->settings = modified_settings;
4153
4154 /* Capture Settling & 0X frame */
4155
4156 itr = internallyRequestedStreams.begin();
4157 if (itr == internallyRequestedStreams.end()) {
4158 LOGE("Error Internally Requested Stream list is empty");
4159 assert(0);
4160 } else {
4161 itr->need_metadata = 0;
4162 itr->meteringOnly = 1;
4163 }
4164
4165 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4166 request->frame_number = internalFrameNumber;
4167 processCaptureRequest(request, internallyRequestedStreams);
4168
4169 itr = internallyRequestedStreams.begin();
4170 if (itr == internallyRequestedStreams.end()) {
4171 ALOGE("Error Internally Requested Stream list is empty");
4172 assert(0);
4173 } else {
4174 itr->need_metadata = 1;
4175 itr->meteringOnly = 0;
4176 }
4177
4178 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4179 request->frame_number = internalFrameNumber;
4180 processCaptureRequest(request, internallyRequestedStreams);
4181
4182 /* Capture 2X frame*/
4183 modified_meta = modified_settings;
4184 expCompensation = GB_HDR_2X_STEP_EV;
4185 aeLock = 1;
4186 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4187 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4188 modified_settings = modified_meta.release();
4189 request->settings = modified_settings;
4190
4191 itr = internallyRequestedStreams.begin();
4192 if (itr == internallyRequestedStreams.end()) {
4193 ALOGE("Error Internally Requested Stream list is empty");
4194 assert(0);
4195 } else {
4196 itr->need_metadata = 0;
4197 itr->meteringOnly = 1;
4198 }
4199 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4200 request->frame_number = internalFrameNumber;
4201 processCaptureRequest(request, internallyRequestedStreams);
4202
4203 itr = internallyRequestedStreams.begin();
4204 if (itr == internallyRequestedStreams.end()) {
4205 ALOGE("Error Internally Requested Stream list is empty");
4206 assert(0);
4207 } else {
4208 itr->need_metadata = 1;
4209 itr->meteringOnly = 0;
4210 }
4211
4212 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4213 request->frame_number = internalFrameNumber;
4214 processCaptureRequest(request, internallyRequestedStreams);
4215
4216
4217 /* Capture 2X on original streaming config*/
4218 internallyRequestedStreams.clear();
4219
4220 /* Restore original settings pointer */
4221 request->settings = original_settings;
4222 } else {
4223 uint32_t internalFrameNumber;
4224 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4225 request->frame_number = internalFrameNumber;
4226 return processCaptureRequest(request, internallyRequestedStreams);
4227 }
4228
4229 return NO_ERROR;
4230}
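/*===========================================================================
 * Illustrative summary of the bracketing sequence issued above for an HDR
 * snapshot. The helper names below are hypothetical stand-ins for the
 * repeated modify-settings / generateStoreInternalFrameNumber /
 * processCaptureRequest blocks:
 *
 *     captureInternal(GB_HDR_HALF_STEP_EV, true);   // metering-only settling
 *     captureFramework(GB_HDR_HALF_STEP_EV);        // framework frame, original streams
 *     captureInternal(0, true);                     // metering-only settling
 *     captureInternal(0, false);                    // 0x frame, metadata kept
 *     captureInternal(GB_HDR_2X_STEP_EV, true);     // metering-only settling
 *     captureInternal(GB_HDR_2X_STEP_EV, false);    // 2x frame, metadata kept
 *
 * Only the captureFramework() leg is mapped back to the original framework
 * frame number via allocStoreInternalFrameNumber(); every other capture
 * uses a purely internal frame number and its result is dropped in
 * orchestrateResult()/orchestrateNotify().
 *==========================================================================*/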
4231
4232/*===========================================================================
4233 * FUNCTION : orchestrateResult
4234 *
4235 * DESCRIPTION: Orchestrates a capture result to camera service
4236 *
4237 * PARAMETERS :
4238 * @result : capture result to send to the framework
4239 *
4240 * RETURN :
4241 *
4242 *==========================================================================*/
4243void QCamera3HardwareInterface::orchestrateResult(
4244 camera3_capture_result_t *result)
4245{
4246 uint32_t frameworkFrameNumber;
4247 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4248 frameworkFrameNumber);
4249 if (rc != NO_ERROR) {
4250 LOGE("Cannot find translated frameworkFrameNumber");
4251 assert(0);
4252 } else {
4253 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004254 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004255 } else {
4256 result->frame_number = frameworkFrameNumber;
4257 mCallbackOps->process_capture_result(mCallbackOps, result);
4258 }
4259 }
4260}
4261
4262/*===========================================================================
4263 * FUNCTION : orchestrateNotify
4264 *
4265 * DESCRIPTION: Orchestrates a notify to camera service
4266 *
4267 * PARAMETERS :
4268 * @notify_msg : notify message to send to the framework
4269 *
4270 * RETURN :
4271 *
4272 *==========================================================================*/
4273void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4274{
4275 uint32_t frameworkFrameNumber;
4276 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004277 int32_t rc = NO_ERROR;
4278
4279 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004280 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004281
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004282 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004283 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4284 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4285 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004286 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004287 LOGE("Cannot find translated frameworkFrameNumber");
4288 assert(0);
4289 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004290 }
4291 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004292
4293 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4294 LOGD("Internal Request drop the notifyCb");
4295 } else {
4296 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4297 mCallbackOps->notify(mCallbackOps, notify_msg);
4298 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004299}
4300
4301/*===========================================================================
4302 * FUNCTION : FrameNumberRegistry
4303 *
4304 * DESCRIPTION: Constructor
4305 *
4306 * PARAMETERS :
4307 *
4308 * RETURN :
4309 *
4310 *==========================================================================*/
4311FrameNumberRegistry::FrameNumberRegistry()
4312{
4313 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4314}
4315
4316/*===========================================================================
4317 * FUNCTION : ~FrameNumberRegistry
4318 *
4319 * DESCRIPTION: Destructor
4320 *
4321 * PARAMETERS :
4322 *
4323 * RETURN :
4324 *
4325 *==========================================================================*/
4326FrameNumberRegistry::~FrameNumberRegistry()
4327{
4328}
4329
4330/*===========================================================================
4331 * FUNCTION : PurgeOldEntriesLocked
4332 *
4333 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4334 *
4335 * PARAMETERS :
4336 *
4337 * RETURN : NONE
4338 *
4339 *==========================================================================*/
4340void FrameNumberRegistry::purgeOldEntriesLocked()
4341{
4342 while (_register.begin() != _register.end()) {
4343 auto itr = _register.begin();
4344 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4345 _register.erase(itr);
4346 } else {
4347 return;
4348 }
4349 }
4350}
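/*===========================================================================
 * Illustrative sketch of the purge window above, with made-up numbers and
 * assuming FRAME_REGISTER_LRU_SIZE == 256 (the real value is defined in the
 * header). The loop relies on the registry iterating in ascending key
 * order, which is what lets it stop at the first key inside the window.
 *
 *     // _nextFreeInternalNumber == 1000, so the cutoff is 1000 - 256 = 744.
 *     // _register keys before purge : { 700, 743, 744, 999 }
 *     // _register keys after purge  : { 744, 999 }
 *==========================================================================*/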
4351
4352/*===========================================================================
4353 * FUNCTION : allocStoreInternalFrameNumber
4354 *
4355 * DESCRIPTION: Method to note down a framework request and associate a new
4356 * internal request number against it
4357 *
4358 * PARAMETERS :
4359 * @fFrameNumber: Identifier given by framework
4360 * @internalFN : Output parameter which will have the newly generated internal
4361 * entry
4362 *
4363 * RETURN : Error code
4364 *
4365 *==========================================================================*/
4366int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4367 uint32_t &internalFrameNumber)
4368{
4369 Mutex::Autolock lock(mRegistryLock);
4370 internalFrameNumber = _nextFreeInternalNumber++;
4371 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4372 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4373 purgeOldEntriesLocked();
4374 return NO_ERROR;
4375}
4376
4377/*===========================================================================
4378 * FUNCTION : generateStoreInternalFrameNumber
4379 *
4380 * DESCRIPTION: Method to associate a new internal request number independent
4381 * of any association with framework requests
4382 *
4383 * PARAMETERS :
4384 * @internalFrame#: Output parameter which will have the newly generated
4385 * internal frame number
4386 *
4387 * RETURN : Error code
4388 *
4389 *==========================================================================*/
4390int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4391{
4392 Mutex::Autolock lock(mRegistryLock);
4393 internalFrameNumber = _nextFreeInternalNumber++;
4394 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4395 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4396 purgeOldEntriesLocked();
4397 return NO_ERROR;
4398}
4399
4400/*===========================================================================
4401 * FUNCTION : getFrameworkFrameNumber
4402 *
4403 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4404 *
4405 * PARAMETERS :
4406 * @internalFrame#: Internal reference
4407 * @frameworkframenumber: Output parameter holding framework frame entry
4408 *
4409 * RETURN : Error code
4410 *
4411 *==========================================================================*/
4412int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4413 uint32_t &frameworkFrameNumber)
4414{
4415 Mutex::Autolock lock(mRegistryLock);
4416 auto itr = _register.find(internalFrameNumber);
4417 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004418 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004419 return -ENOENT;
4420 }
4421
4422 frameworkFrameNumber = itr->second;
4423 purgeOldEntriesLocked();
4424 return NO_ERROR;
4425}
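/*===========================================================================
 * Illustrative sketch: the registry round trip as used by the orchestration
 * code above (frame numbers are hypothetical).
 *
 *     FrameNumberRegistry db;
 *     uint32_t internalFrame, settlingFrame, fwkFrame;
 *
 *     // Framework request 12 gets a fresh internal number.
 *     db.allocStoreInternalFrameNumber(12, internalFrame);
 *
 *     // Purely internal captures (e.g. HDR settling frames) map to
 *     // EMPTY_FRAMEWORK_FRAME_NUMBER and are dropped on the result path.
 *     db.generateStoreInternalFrameNumber(settlingFrame);
 *
 *     // Result path: translate back before reporting to the framework.
 *     if (db.getFrameworkFrameNumber(internalFrame, fwkFrame) == NO_ERROR) {
 *         // fwkFrame == 12
 *     }
 *==========================================================================*/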
Thierry Strudel3d639192016-09-09 11:52:26 -07004426
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004427status_t QCamera3HardwareInterface::fillPbStreamConfig(
4428 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4429 QCamera3Channel *channel, uint32_t streamIndex) {
4430 if (config == nullptr) {
4431 LOGE("%s: config is null", __FUNCTION__);
4432 return BAD_VALUE;
4433 }
4434
4435 if (channel == nullptr) {
4436 LOGE("%s: channel is null", __FUNCTION__);
4437 return BAD_VALUE;
4438 }
4439
4440 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4441 if (stream == nullptr) {
4442 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4443 return NAME_NOT_FOUND;
4444 }
4445
4446 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4447 if (streamInfo == nullptr) {
4448 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4449 return NAME_NOT_FOUND;
4450 }
4451
4452 config->id = pbStreamId;
4453 config->image.width = streamInfo->dim.width;
4454 config->image.height = streamInfo->dim.height;
4455 config->image.padding = 0;
4456 config->image.format = pbStreamFormat;
4457
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004458 uint32_t totalPlaneSize = 0;
4459
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004460 // Fill plane information.
4461 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4462 pbcamera::PlaneConfiguration plane;
4463 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4464 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4465 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004466
4467 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004468 }
4469
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004470 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004471 return OK;
4472}
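/*===========================================================================
 * Illustrative sketch of the padding computation above, with made-up plane
 * geometry for a hypothetical two-plane stream:
 *
 *     plane 0: stride 4608 x scanline 3456 = 15,925,248 bytes
 *     plane 1: stride 4608 x scanline 1728 =  7,962,624 bytes
 *     frame_len reported by the backend    = 24,051,712 bytes
 *
 *     totalPlaneSize        = 15,925,248 + 7,962,624  = 23,887,872
 *     config->image.padding = 24,051,712 - 23,887,872 = 163,840
 *
 * i.e. padding carries whatever trailing alignment the allocator added
 * beyond the stride x scanline planes.
 *==========================================================================*/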
4473
Thierry Strudel3d639192016-09-09 11:52:26 -07004474/*===========================================================================
4475 * FUNCTION : processCaptureRequest
4476 *
4477 * DESCRIPTION: process a capture request from camera service
4478 *
4479 * PARAMETERS :
4480 * @request : request from framework to process
4481 *
4482 * RETURN :
4483 *
4484 *==========================================================================*/
4485int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004486 camera3_capture_request_t *request,
4487 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004488{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004489 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004490 int rc = NO_ERROR;
4491 int32_t request_id;
4492 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004493 bool isVidBufRequested = false;
4494 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004495 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004496
4497 pthread_mutex_lock(&mMutex);
4498
4499 // Validate current state
4500 switch (mState) {
4501 case CONFIGURED:
4502 case STARTED:
4503 /* valid state */
4504 break;
4505
4506 case ERROR:
4507 pthread_mutex_unlock(&mMutex);
4508 handleCameraDeviceError();
4509 return -ENODEV;
4510
4511 default:
4512 LOGE("Invalid state %d", mState);
4513 pthread_mutex_unlock(&mMutex);
4514 return -ENODEV;
4515 }
4516
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004517 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004518 if (rc != NO_ERROR) {
4519 LOGE("incoming request is not valid");
4520 pthread_mutex_unlock(&mMutex);
4521 return rc;
4522 }
4523
4524 meta = request->settings;
4525
4526 // For first capture request, send capture intent, and
4527 // stream on all streams
4528 if (mState == CONFIGURED) {
4529 // send an unconfigure to the backend so that the isp
4530 // resources are deallocated
4531 if (!mFirstConfiguration) {
4532 cam_stream_size_info_t stream_config_info;
4533 int32_t hal_version = CAM_HAL_V3;
4534 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4535 stream_config_info.buffer_info.min_buffers =
4536 MIN_INFLIGHT_REQUESTS;
4537 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004538 m_bIs4KVideo ? 0 :
4539 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004540 clear_metadata_buffer(mParameters);
4541 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4542 CAM_INTF_PARM_HAL_VERSION, hal_version);
4543 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4544 CAM_INTF_META_STREAM_INFO, stream_config_info);
4545 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4546 mParameters);
4547 if (rc < 0) {
4548 LOGE("set_parms for unconfigure failed");
4549 pthread_mutex_unlock(&mMutex);
4550 return rc;
4551 }
4552 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004553 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004554 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004555 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004556 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004557 property_get("persist.camera.is_type", is_type_value, "4");
4558 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4559 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4560 property_get("persist.camera.is_type_preview", is_type_value, "4");
4561 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4562 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004563
4564 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4565 int32_t hal_version = CAM_HAL_V3;
4566 uint8_t captureIntent =
4567 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4568 mCaptureIntent = captureIntent;
4569 clear_metadata_buffer(mParameters);
4570 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4571 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4572 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004573 if (mFirstConfiguration) {
4574 // configure instant AEC
4575 // Instant AEC is a session based parameter and it is needed only
4576 // once per complete session after open camera.
4577 // i.e. This is set only once for the first capture request, after open camera.
4578 setInstantAEC(meta);
4579 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004580 uint8_t fwkVideoStabMode=0;
4581 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4582 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4583 }
4584
4585 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4586 // turn it on for video/preview
4587 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4588 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004589 int32_t vsMode;
4590 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4592 rc = BAD_VALUE;
4593 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004594 LOGD("setEis %d", setEis);
4595 bool eis3Supported = false;
4596 size_t count = IS_TYPE_MAX;
4597 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4598 for (size_t i = 0; i < count; i++) {
4599 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4600 eis3Supported = true;
4601 break;
4602 }
4603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004604
4605 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004606 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004607 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4608 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004609 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4610 is_type = isTypePreview;
4611 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4612 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4613 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004614 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004615 } else {
4616 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004617 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004618 } else {
4619 is_type = IS_TYPE_NONE;
4620 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004621 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004622 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004623 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4624 }
4625 }
4626
4627 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4628 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4629
Thierry Strudel54dc9782017-02-15 12:12:10 -08004630 //Disable tintless only if the property is set to 0
4631 memset(prop, 0, sizeof(prop));
4632 property_get("persist.camera.tintless.enable", prop, "1");
4633 int32_t tintless_value = atoi(prop);
4634
Thierry Strudel3d639192016-09-09 11:52:26 -07004635 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4636 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004637
Thierry Strudel3d639192016-09-09 11:52:26 -07004638 //Disable CDS for HFR mode or if DIS/EIS is on.
4639 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4640 //after every configure_stream
4641 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4642 (m_bIsVideo)) {
4643 int32_t cds = CAM_CDS_MODE_OFF;
4644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4645 CAM_INTF_PARM_CDS_MODE, cds))
4646 LOGE("Failed to disable CDS for HFR mode");
4647
4648 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004649
4650 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4651 uint8_t* use_av_timer = NULL;
4652
4653 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004654 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004655 use_av_timer = &m_debug_avtimer;
4656 }
4657 else{
4658 use_av_timer =
4659 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004660 if (use_av_timer) {
4661 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4662 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004663 }
4664
4665 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4666 rc = BAD_VALUE;
4667 }
4668 }
4669
Thierry Strudel3d639192016-09-09 11:52:26 -07004670 setMobicat();
4671
4672 /* Set fps and hfr mode while sending meta stream info so that sensor
4673 * can configure appropriate streaming mode */
4674 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004675 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4676 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004677 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4678 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004679 if (rc == NO_ERROR) {
4680 int32_t max_fps =
4681 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004682 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004683 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4684 }
4685 /* For HFR, more buffers are dequeued upfront to improve the performance */
4686 if (mBatchSize) {
4687 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4688 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4689 }
4690 }
4691 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004692 LOGE("setHalFpsRange failed");
4693 }
4694 }
4695 if (meta.exists(ANDROID_CONTROL_MODE)) {
4696 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4697 rc = extractSceneMode(meta, metaMode, mParameters);
4698 if (rc != NO_ERROR) {
4699 LOGE("extractSceneMode failed");
4700 }
4701 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004702 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004703
Thierry Strudel04e026f2016-10-10 11:27:36 -07004704 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4705 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4706 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4707 rc = setVideoHdrMode(mParameters, vhdr);
4708 if (rc != NO_ERROR) {
4709 LOGE("setVideoHDR is failed");
4710 }
4711 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004712
Thierry Strudel3d639192016-09-09 11:52:26 -07004713 //TODO: validate the arguments, HSV scenemode should have only the
4714 //advertised fps ranges
4715
4716 /*set the capture intent, hal version, tintless, stream info,
4717 *and disenable parameters to the backend*/
4718 LOGD("set_parms META_STREAM_INFO " );
4719 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004720 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4721 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004722 mStreamConfigInfo.type[i],
4723 mStreamConfigInfo.stream_sizes[i].width,
4724 mStreamConfigInfo.stream_sizes[i].height,
4725 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004726 mStreamConfigInfo.format[i],
4727 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004729
Thierry Strudel3d639192016-09-09 11:52:26 -07004730 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4731 mParameters);
4732 if (rc < 0) {
4733 LOGE("set_parms failed for hal version, stream info");
4734 }
4735
Chien-Yu Chenee335912017-02-09 17:53:20 -08004736 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4737 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004738 if (rc != NO_ERROR) {
4739 LOGE("Failed to get sensor output size");
4740 pthread_mutex_unlock(&mMutex);
4741 goto error_exit;
4742 }
4743
4744 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4745 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004746 mSensorModeInfo.active_array_size.width,
4747 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004748
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004749 if (gHdrPlusClient != nullptr) {
4750 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4751 if (rc != OK) {
4752 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4753 mCameraId, mSensorModeInfo.op_pixel_clk);
4754 pthread_mutex_unlock(&mMutex);
4755 goto error_exit;
4756 }
4757 }
4758
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 /* Set batchmode before initializing channel. Since registerBuffer
4760 * internally initializes some of the channels, better set batchmode
4761 * even before first register buffer */
4762 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4763 it != mStreamInfo.end(); it++) {
4764 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4765 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4766 && mBatchSize) {
4767 rc = channel->setBatchSize(mBatchSize);
4768 //Disable per frame map unmap for HFR/batchmode case
4769 rc |= channel->setPerFrameMapUnmap(false);
4770 if (NO_ERROR != rc) {
4771 LOGE("Channel init failed %d", rc);
4772 pthread_mutex_unlock(&mMutex);
4773 goto error_exit;
4774 }
4775 }
4776 }
4777
4778 //First initialize all streams
4779 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4780 it != mStreamInfo.end(); it++) {
4781 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4782 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4783 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004784 setEis) {
4785 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4786 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4787 is_type = mStreamConfigInfo.is_type[i];
4788 break;
4789 }
4790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004792 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004793 rc = channel->initialize(IS_TYPE_NONE);
4794 }
4795 if (NO_ERROR != rc) {
4796 LOGE("Channel initialization failed %d", rc);
4797 pthread_mutex_unlock(&mMutex);
4798 goto error_exit;
4799 }
4800 }
4801
4802 if (mRawDumpChannel) {
4803 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4804 if (rc != NO_ERROR) {
4805 LOGE("Error: Raw Dump Channel init failed");
4806 pthread_mutex_unlock(&mMutex);
4807 goto error_exit;
4808 }
4809 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004810 if (mHdrPlusRawSrcChannel) {
4811 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4812 if (rc != NO_ERROR) {
4813 LOGE("Error: HDR+ RAW Source Channel init failed");
4814 pthread_mutex_unlock(&mMutex);
4815 goto error_exit;
4816 }
4817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 if (mSupportChannel) {
4819 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4820 if (rc < 0) {
4821 LOGE("Support channel initialization failed");
4822 pthread_mutex_unlock(&mMutex);
4823 goto error_exit;
4824 }
4825 }
4826 if (mAnalysisChannel) {
4827 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4828 if (rc < 0) {
4829 LOGE("Analysis channel initialization failed");
4830 pthread_mutex_unlock(&mMutex);
4831 goto error_exit;
4832 }
4833 }
4834 if (mDummyBatchChannel) {
4835 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4836 if (rc < 0) {
4837 LOGE("mDummyBatchChannel setBatchSize failed");
4838 pthread_mutex_unlock(&mMutex);
4839 goto error_exit;
4840 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 if (rc < 0) {
4843 LOGE("mDummyBatchChannel initialization failed");
4844 pthread_mutex_unlock(&mMutex);
4845 goto error_exit;
4846 }
4847 }
4848
4849 // Set bundle info
4850 rc = setBundleInfo();
4851 if (rc < 0) {
4852 LOGE("setBundleInfo failed %d", rc);
4853 pthread_mutex_unlock(&mMutex);
4854 goto error_exit;
4855 }
4856
4857 //update settings from app here
4858 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4859 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4860 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4861 }
4862 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4863 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4864 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4865 }
4866 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4867 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4868 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4869
4870 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4871 (mLinkedCameraId != mCameraId) ) {
4872 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4873 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 goto error_exit;
4876 }
4877 }
4878
4879 // add bundle related cameras
4880 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4881 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004882 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4883 &m_pDualCamCmdPtr->bundle_info;
4884 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004885 if (mIsDeviceLinked)
4886 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4887 else
4888 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4889
4890 pthread_mutex_lock(&gCamLock);
4891
4892 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4893 LOGE("Dualcam: Invalid Session Id ");
4894 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004895 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 goto error_exit;
4897 }
4898
4899 if (mIsMainCamera == 1) {
4900 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4901 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004902 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004903 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 // related session id should be session id of linked session
4905 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4906 } else {
4907 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4908 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004909 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004910 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4912 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004913 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 pthread_mutex_unlock(&gCamLock);
4915
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004916 rc = mCameraHandle->ops->set_dual_cam_cmd(
4917 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004918 if (rc < 0) {
4919 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004920 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004921 goto error_exit;
4922 }
4923 }
4924
4925 //Then start them.
4926 LOGH("Start META Channel");
4927 rc = mMetadataChannel->start();
4928 if (rc < 0) {
4929 LOGE("META channel start failed");
4930 pthread_mutex_unlock(&mMutex);
4931 goto error_exit;
4932 }
4933
4934 if (mAnalysisChannel) {
4935 rc = mAnalysisChannel->start();
4936 if (rc < 0) {
4937 LOGE("Analysis channel start failed");
4938 mMetadataChannel->stop();
4939 pthread_mutex_unlock(&mMutex);
4940 goto error_exit;
4941 }
4942 }
4943
4944 if (mSupportChannel) {
4945 rc = mSupportChannel->start();
4946 if (rc < 0) {
4947 LOGE("Support channel start failed");
4948 mMetadataChannel->stop();
4949 /* Although support and analysis are mutually exclusive today
4950 adding it in any case for future proofing */
4951 if (mAnalysisChannel) {
4952 mAnalysisChannel->stop();
4953 }
4954 pthread_mutex_unlock(&mMutex);
4955 goto error_exit;
4956 }
4957 }
4958 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4959 it != mStreamInfo.end(); it++) {
4960 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4961 LOGH("Start Processing Channel mask=%d",
4962 channel->getStreamTypeMask());
4963 rc = channel->start();
4964 if (rc < 0) {
4965 LOGE("channel start failed");
4966 pthread_mutex_unlock(&mMutex);
4967 goto error_exit;
4968 }
4969 }
4970
4971 if (mRawDumpChannel) {
4972 LOGD("Starting raw dump stream");
4973 rc = mRawDumpChannel->start();
4974 if (rc != NO_ERROR) {
4975 LOGE("Error Starting Raw Dump Channel");
4976 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4977 it != mStreamInfo.end(); it++) {
4978 QCamera3Channel *channel =
4979 (QCamera3Channel *)(*it)->stream->priv;
4980 LOGH("Stopping Processing Channel mask=%d",
4981 channel->getStreamTypeMask());
4982 channel->stop();
4983 }
4984 if (mSupportChannel)
4985 mSupportChannel->stop();
4986 if (mAnalysisChannel) {
4987 mAnalysisChannel->stop();
4988 }
4989 mMetadataChannel->stop();
4990 pthread_mutex_unlock(&mMutex);
4991 goto error_exit;
4992 }
4993 }
4994
4995 if (mChannelHandle) {
4996
4997 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4998 mChannelHandle);
4999 if (rc != NO_ERROR) {
5000 LOGE("start_channel failed %d", rc);
5001 pthread_mutex_unlock(&mMutex);
5002 goto error_exit;
5003 }
5004 }
5005
5006 goto no_error;
5007error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005008 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 return rc;
5010no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005011 mWokenUpByDaemon = false;
5012 mPendingLiveRequest = 0;
5013 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005014 }
5015
Chien-Yu Chenee335912017-02-09 17:53:20 -08005016 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005017 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005018 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5019 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5020 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5021 rc = enableHdrPlusModeLocked();
5022 if (rc != OK) {
5023 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5024 pthread_mutex_unlock(&mMutex);
5025 return rc;
5026 }
5027
5028 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5029 if (mHdrPlusRawSrcChannel) {
5030 rc = mHdrPlusRawSrcChannel->start();
5031 if (rc != OK) {
5032 LOGE("Error Starting HDR+ RAW Channel");
5033 pthread_mutex_unlock(&mMutex);
5034 return rc;
5035 }
5036 }
5037 mFirstPreviewIntentSeen = true;
5038 }
5039
Thierry Strudel3d639192016-09-09 11:52:26 -07005040 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005041 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005042
5043 if (mFlushPerf) {
5044 //we cannot accept any requests during flush
5045 LOGE("process_capture_request cannot proceed during flush");
5046 pthread_mutex_unlock(&mMutex);
5047 return NO_ERROR; //should return an error
5048 }
5049
5050 if (meta.exists(ANDROID_REQUEST_ID)) {
5051 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5052 mCurrentRequestId = request_id;
5053 LOGD("Received request with id: %d", request_id);
5054 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5055 LOGE("Unable to find request id field, \
5056 & no previous id available");
5057 pthread_mutex_unlock(&mMutex);
5058 return NAME_NOT_FOUND;
5059 } else {
5060 LOGD("Re-using old request id");
5061 request_id = mCurrentRequestId;
5062 }
5063
5064 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5065 request->num_output_buffers,
5066 request->input_buffer,
5067 frameNumber);
5068 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005069 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005070 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005071 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 uint32_t snapshotStreamId = 0;
5073 for (size_t i = 0; i < request->num_output_buffers; i++) {
5074 const camera3_stream_buffer_t& output = request->output_buffers[i];
5075 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5076
Emilian Peev7650c122017-01-19 08:24:33 -08005077 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5078 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005079 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 blob_request = 1;
5081 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5082 }
5083
5084 if (output.acquire_fence != -1) {
5085 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5086 close(output.acquire_fence);
5087 if (rc != OK) {
5088 LOGE("sync wait failed %d", rc);
5089 pthread_mutex_unlock(&mMutex);
5090 return rc;
5091 }
5092 }
5093
Emilian Peev7650c122017-01-19 08:24:33 -08005094 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5095 depthRequestPresent = true;
5096 continue;
5097 }
5098
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005099 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005100 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005101
5102 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5103 isVidBufRequested = true;
5104 }
5105 }
5106
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005107 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5108 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5109 itr++) {
5110 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5111 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5112 channel->getStreamID(channel->getStreamTypeMask());
5113
5114 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5115 isVidBufRequested = true;
5116 }
5117 }
5118
Thierry Strudel3d639192016-09-09 11:52:26 -07005119 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005120 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005121 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 }
5123 if (blob_request && mRawDumpChannel) {
5124 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005125 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005126 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005127 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 }
5129
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005130 {
5131 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5132 // Request a RAW buffer if
5133 // 1. mHdrPlusRawSrcChannel is valid.
5134 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5135 // 3. There is no pending HDR+ request.
5136 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5137 mHdrPlusPendingRequests.size() == 0) {
5138 streamsArray.stream_request[streamsArray.num_streams].streamID =
5139 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5140 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5141 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005142 }
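    // Illustrative note (hypothetical numbers): with kHdrPlusRawPeriod == 4,
    // RAW buffers would be requested only at frame numbers 0, 4, 8, ... and
    // only while mHdrPlusPendingRequests is empty, so the HDR+ RAW source
    // stream runs at roughly 1/4 of the request rate. The real period is
    // defined elsewhere in this file.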
5143
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005144 //extract capture intent
5145 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5146 mCaptureIntent =
5147 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5148 }
5149
5150 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5151 mCacMode =
5152 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5153 }
5154
5155 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005156 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005157
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005158 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005159 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005160 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5161 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005162 }
5163
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005164 if (hdrPlusRequest) {
5165 // For a HDR+ request, just set the frame parameters.
5166 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5167 if (rc < 0) {
5168 LOGE("fail to set frame parameters");
5169 pthread_mutex_unlock(&mMutex);
5170 return rc;
5171 }
5172 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 /* Parse the settings:
5174 * - For every request in NORMAL MODE
5175 * - For every request in HFR mode during preview only case
5176 * - For first request of every batch in HFR mode during video
5177 * recording. In batchmode the same settings except frame number is
5178 * repeated in each request of the batch.
5179 */
5180 if (!mBatchSize ||
5181 (mBatchSize && !isVidBufRequested) ||
5182 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005183 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005184 if (rc < 0) {
5185 LOGE("fail to set frame parameters");
5186 pthread_mutex_unlock(&mMutex);
5187 return rc;
5188 }
5189 }
5190 /* For batchMode HFR, setFrameParameters is not called for every
5191 * request. But only frame number of the latest request is parsed.
5192 * Keep track of first and last frame numbers in a batch so that
5193 * metadata for the frame numbers of batch can be duplicated in
5194 * handleBatchMetadata */
5195 if (mBatchSize) {
5196 if (!mToBeQueuedVidBufs) {
5197 //start of the batch
5198 mFirstFrameNumberInBatch = request->frame_number;
5199 }
5200 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5201 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5202 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005203 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 return BAD_VALUE;
5205 }
5206 }
5207 if (mNeedSensorRestart) {
5208 /* Unlock the mutex as restartSensor waits on the channels to be
5209 * stopped, which in turn calls stream callback functions -
5210 * handleBufferWithLock and handleMetadataWithLock */
5211 pthread_mutex_unlock(&mMutex);
5212 rc = dynamicUpdateMetaStreamInfo();
5213 if (rc != NO_ERROR) {
5214 LOGE("Restarting the sensor failed");
5215 return BAD_VALUE;
5216 }
5217 mNeedSensorRestart = false;
5218 pthread_mutex_lock(&mMutex);
5219 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005220 if(mResetInstantAEC) {
5221 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5222 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5223 mResetInstantAEC = false;
5224 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005225 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 if (request->input_buffer->acquire_fence != -1) {
5227 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5228 close(request->input_buffer->acquire_fence);
5229 if (rc != OK) {
5230 LOGE("input buffer sync wait failed %d", rc);
5231 pthread_mutex_unlock(&mMutex);
5232 return rc;
5233 }
5234 }
5235 }
5236
5237 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5238 mLastCustIntentFrmNum = frameNumber;
5239 }
5240 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005241 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 pendingRequestIterator latestRequest;
5243 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005244 pendingRequest.num_buffers = depthRequestPresent ?
5245 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005246 pendingRequest.request_id = request_id;
5247 pendingRequest.blob_request = blob_request;
5248 pendingRequest.timestamp = 0;
5249 pendingRequest.bUrgentReceived = 0;
5250 if (request->input_buffer) {
5251 pendingRequest.input_buffer =
5252 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5253 *(pendingRequest.input_buffer) = *(request->input_buffer);
5254 pInputBuffer = pendingRequest.input_buffer;
5255 } else {
5256 pendingRequest.input_buffer = NULL;
5257 pInputBuffer = NULL;
5258 }
5259
5260 pendingRequest.pipeline_depth = 0;
5261 pendingRequest.partial_result_cnt = 0;
5262 extractJpegMetadata(mCurJpegMeta, request);
5263 pendingRequest.jpegMetadata = mCurJpegMeta;
5264 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5265 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005266 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005267 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5268 mHybridAeEnable =
5269 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5270 }
5271 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005272 /* DevCamDebug metadata processCaptureRequest */
5273 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5274 mDevCamDebugMetaEnable =
5275 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5276 }
5277 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5278 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005279
5280 //extract CAC info
5281 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5282 mCacMode =
5283 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5284 }
5285 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005286 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287
5288 PendingBuffersInRequest bufsForCurRequest;
5289 bufsForCurRequest.frame_number = frameNumber;
5290 // Mark current timestamp for the new request
5291 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005292 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005293
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005294 if (hdrPlusRequest) {
5295 // Save settings for this request.
5296 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5297 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5298
5299 // Add to pending HDR+ request queue.
5300 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5301 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5302
5303 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5304 }
5305
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005307 if (request->output_buffers[i].stream->data_space ==
5308 HAL_DATASPACE_DEPTH) {
5309 continue;
5310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 RequestedBufferInfo requestedBuf;
5312 memset(&requestedBuf, 0, sizeof(requestedBuf));
5313 requestedBuf.stream = request->output_buffers[i].stream;
5314 requestedBuf.buffer = NULL;
5315 pendingRequest.buffers.push_back(requestedBuf);
5316
5317 // Add to buffer handle the pending buffers list
5318 PendingBufferInfo bufferInfo;
5319 bufferInfo.buffer = request->output_buffers[i].buffer;
5320 bufferInfo.stream = request->output_buffers[i].stream;
5321 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5322 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5323 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5324 frameNumber, bufferInfo.buffer,
5325 channel->getStreamTypeMask(), bufferInfo.stream->format);
5326 }
5327 // Add this request packet into mPendingBuffersMap
5328 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5329 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5330 mPendingBuffersMap.get_num_overall_buffers());
5331
5332 latestRequest = mPendingRequestsList.insert(
5333 mPendingRequestsList.end(), pendingRequest);
5334 if(mFlush) {
5335 LOGI("mFlush is true");
5336 pthread_mutex_unlock(&mMutex);
5337 return NO_ERROR;
5338 }
5339
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005340 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5341 // channel.
5342 if (!hdrPlusRequest) {
5343 int indexUsed;
5344 // Notify metadata channel we receive a request
5345 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005346
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005347 if(request->input_buffer != NULL){
5348 LOGD("Input request, frame_number %d", frameNumber);
5349 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5350 if (NO_ERROR != rc) {
5351 LOGE("fail to set reproc parameters");
5352 pthread_mutex_unlock(&mMutex);
5353 return rc;
5354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005355 }
5356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357 // Call request on other streams
5358 uint32_t streams_need_metadata = 0;
5359 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5360 for (size_t i = 0; i < request->num_output_buffers; i++) {
5361 const camera3_stream_buffer_t& output = request->output_buffers[i];
5362 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5363
5364 if (channel == NULL) {
5365 LOGW("invalid channel pointer for stream");
5366 continue;
5367 }
5368
5369 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5370 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5371 output.buffer, request->input_buffer, frameNumber);
5372 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005374 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5375 if (rc < 0) {
5376 LOGE("Fail to request on picture channel");
5377 pthread_mutex_unlock(&mMutex);
5378 return rc;
5379 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005380 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005381 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5382 assert(NULL != mDepthChannel);
5383 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005384
Emilian Peev7650c122017-01-19 08:24:33 -08005385 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5386 if (rc < 0) {
5387 LOGE("Fail to map on depth buffer");
5388 pthread_mutex_unlock(&mMutex);
5389 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390 }
Emilian Peev7650c122017-01-19 08:24:33 -08005391 } else {
5392 LOGD("snapshot request with buffer %p, frame_number %d",
5393 output.buffer, frameNumber);
5394 if (!request->settings) {
5395 rc = channel->request(output.buffer, frameNumber,
5396 NULL, mPrevParameters, indexUsed);
5397 } else {
5398 rc = channel->request(output.buffer, frameNumber,
5399 NULL, mParameters, indexUsed);
5400 }
5401 if (rc < 0) {
5402 LOGE("Fail to request on picture channel");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005406
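                    // Record which buffer index the backend should use for this stream;
                    // in constrained high-speed mode CAM_FREERUN_IDX is used instead of
                    // the exact index.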
Emilian Peev7650c122017-01-19 08:24:33 -08005407 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5408 uint32_t j = 0;
5409 for (j = 0; j < streamsArray.num_streams; j++) {
5410 if (streamsArray.stream_request[j].streamID == streamId) {
5411 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5412 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5413 else
5414 streamsArray.stream_request[j].buf_index = indexUsed;
5415 break;
5416 }
5417 }
5418 if (j == streamsArray.num_streams) {
5419 LOGE("Did not find matching stream to update index");
5420 assert(0);
5421 }
5422
5423 pendingBufferIter->need_metadata = true;
5424 streams_need_metadata++;
5425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005426 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005427 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5428 bool needMetadata = false;
5429 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5430 rc = yuvChannel->request(output.buffer, frameNumber,
5431 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5432 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005434 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 pthread_mutex_unlock(&mMutex);
5436 return rc;
5437 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005438
5439 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5440 uint32_t j = 0;
5441 for (j = 0; j < streamsArray.num_streams; j++) {
5442 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005443 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5444 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5445 else
5446 streamsArray.stream_request[j].buf_index = indexUsed;
5447 break;
5448 }
5449 }
5450 if (j == streamsArray.num_streams) {
5451 LOGE("Did not find matching stream to update index");
5452 assert(0);
5453 }
5454
5455 pendingBufferIter->need_metadata = needMetadata;
5456 if (needMetadata)
5457 streams_need_metadata += 1;
5458 LOGD("calling YUV channel request, need_metadata is %d",
5459 needMetadata);
5460 } else {
5461 LOGD("request with buffer %p, frame_number %d",
5462 output.buffer, frameNumber);
5463
5464 rc = channel->request(output.buffer, frameNumber, indexUsed);
5465
5466 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5467 uint32_t j = 0;
5468 for (j = 0; j < streamsArray.num_streams; j++) {
5469 if (streamsArray.stream_request[j].streamID == streamId) {
5470 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5471 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5472 else
5473 streamsArray.stream_request[j].buf_index = indexUsed;
5474 break;
5475 }
5476 }
5477 if (j == streamsArray.num_streams) {
5478 LOGE("Did not find matching stream to update index");
5479 assert(0);
5480 }
5481
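                // For the video stream in HFR batch mode, buffers are queued to the
                // backend one batch at a time, once mBatchSize buffers have accumulated.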
5482 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5483 && mBatchSize) {
5484 mToBeQueuedVidBufs++;
5485 if (mToBeQueuedVidBufs == mBatchSize) {
5486 channel->queueBatchBuf();
5487 }
5488 }
5489 if (rc < 0) {
5490 LOGE("request failed");
5491 pthread_mutex_unlock(&mMutex);
5492 return rc;
5493 }
5494 }
5495 pendingBufferIter++;
5496 }
5497
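        // Issue requests for streams the HAL requested internally (no framework output
        // buffer attached); only BLOB streams, e.g. metering-only captures, are
        // supported here.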
5498 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5499 itr++) {
5500 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5501
5502 if (channel == NULL) {
5503 LOGE("invalid channel pointer for stream");
5504 assert(0);
5505 return BAD_VALUE;
5506 }
5507
5508 InternalRequest requestedStream;
5509 requestedStream = (*itr);
5510
5511
5512 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5513 LOGD("snapshot request internally input buffer %p, frame_number %d",
5514 request->input_buffer, frameNumber);
5515 if(request->input_buffer != NULL){
5516 rc = channel->request(NULL, frameNumber,
5517 pInputBuffer, &mReprocMeta, indexUsed, true,
5518 requestedStream.meteringOnly);
5519 if (rc < 0) {
5520 LOGE("Fail to request on picture channel");
5521 pthread_mutex_unlock(&mMutex);
5522 return rc;
5523 }
5524 } else {
5525 LOGD("snapshot request with frame_number %d", frameNumber);
5526 if (!request->settings) {
5527 rc = channel->request(NULL, frameNumber,
5528 NULL, mPrevParameters, indexUsed, true,
5529 requestedStream.meteringOnly);
5530 } else {
5531 rc = channel->request(NULL, frameNumber,
5532 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5533 }
5534 if (rc < 0) {
5535 LOGE("Fail to request on picture channel");
5536 pthread_mutex_unlock(&mMutex);
5537 return rc;
5538 }
5539
5540 if ((*itr).meteringOnly != 1) {
5541 requestedStream.need_metadata = 1;
5542 streams_need_metadata++;
5543 }
5544 }
5545
5546 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5547 uint32_t j = 0;
5548 for (j = 0; j < streamsArray.num_streams; j++) {
5549 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005550 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5551 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5552 else
5553 streamsArray.stream_request[j].buf_index = indexUsed;
5554 break;
5555 }
5556 }
5557 if (j == streamsArray.num_streams) {
5558 LOGE("Did not find matching stream to update index");
5559 assert(0);
5560 }
5561
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005562 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005563 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005564 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005566 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005567 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005568 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005569
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005570        // If more than one stream needs HAL metadata, fail the request, unless
 5571        // we copy or reference count the metadata buffer
 5572        if (streams_need_metadata > 1) {
 5573            LOGE("not supporting requests in which two streams require"
 5574                    " 2 HAL metadata buffers for reprocessing");
5575 pthread_mutex_unlock(&mMutex);
5576 return -EINVAL;
5577 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005578
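    // Enable PDAF data in the backend parameters only when a depth output buffer was
    // requested for this capture.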
Emilian Peev7650c122017-01-19 08:24:33 -08005579 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5581 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5582 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5583 pthread_mutex_unlock(&mMutex);
5584 return BAD_VALUE;
5585 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 if (request->input_buffer == NULL) {
5587 /* Set the parameters to backend:
5588 * - For every request in NORMAL MODE
5589 * - For every request in HFR mode during preview only case
5590 * - Once every batch in HFR mode during video recording
5591 */
5592 if (!mBatchSize ||
5593 (mBatchSize && !isVidBufRequested) ||
5594 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5595 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5596 mBatchSize, isVidBufRequested,
5597 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005598
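            // On the request that completes an HFR batch, merge the streams requested
            // across the whole batch (deduplicated) so that a single set_parms call
            // covers every stream in the batch.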
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5600 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5601 uint32_t m = 0;
5602 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5603 if (streamsArray.stream_request[k].streamID ==
5604 mBatchedStreamsArray.stream_request[m].streamID)
5605 break;
5606 }
5607 if (m == mBatchedStreamsArray.num_streams) {
5608 mBatchedStreamsArray.stream_request\
5609 [mBatchedStreamsArray.num_streams].streamID =
5610 streamsArray.stream_request[k].streamID;
5611 mBatchedStreamsArray.stream_request\
5612 [mBatchedStreamsArray.num_streams].buf_index =
5613 streamsArray.stream_request[k].buf_index;
5614 mBatchedStreamsArray.num_streams =
5615 mBatchedStreamsArray.num_streams + 1;
5616 }
5617 }
5618 streamsArray = mBatchedStreamsArray;
5619 }
5620 /* Update stream id of all the requested buffers */
5621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5622 streamsArray)) {
5623 LOGE("Failed to set stream type mask in the parameters");
5624 return BAD_VALUE;
5625 }
5626
5627 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5628 mParameters);
5629 if (rc < 0) {
5630 LOGE("set_parms failed");
5631 }
 5632            /* reset to zero because the batch has been queued */
5633 mToBeQueuedVidBufs = 0;
5634 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5635 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5636 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
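                // Batch not yet complete: only accumulate this request's streams into
                // the per-batch list; set_parms is deferred until the batch is full.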
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005637 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5638 uint32_t m = 0;
5639 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5640 if (streamsArray.stream_request[k].streamID ==
5641 mBatchedStreamsArray.stream_request[m].streamID)
5642 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 }
5644 if (m == mBatchedStreamsArray.num_streams) {
5645 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5646 streamID = streamsArray.stream_request[k].streamID;
5647 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5648 buf_index = streamsArray.stream_request[k].buf_index;
5649 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5650 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005651 }
5652 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005653 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005654 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005655 }
5656
5657 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5658
5659 mState = STARTED;
 5660    // Set up a timed condition wait
5661 struct timespec ts;
5662 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005663 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005664 if (rc < 0) {
5665 isValidTimeout = 0;
5666 LOGE("Error reading the real time clock!!");
5667 }
5668 else {
 5669        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 int64_t timeout = 5;
5671 {
5672 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5673 // If there is a pending HDR+ request, the following requests may be blocked until the
5674 // HDR+ request is done. So allow a longer timeout.
5675 if (mHdrPlusPendingRequests.size() > 0) {
5676 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5677 }
5678 }
5679 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005680 }
 5681    // Block on the condition variable to throttle in-flight requests
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005682 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005683 (mState != ERROR) && (mState != DEINIT)) {
5684 if (!isValidTimeout) {
5685 LOGD("Blocking on conditional wait");
5686 pthread_cond_wait(&mRequestCond, &mMutex);
5687 }
5688 else {
5689 LOGD("Blocking on timed conditional wait");
5690 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5691 if (rc == ETIMEDOUT) {
5692 rc = -ENODEV;
5693 LOGE("Unblocked on timeout!!!!");
5694 break;
5695 }
5696 }
5697 LOGD("Unblocked");
5698 if (mWokenUpByDaemon) {
5699 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005700 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005701 break;
5702 }
5703 }
5704 pthread_mutex_unlock(&mMutex);
5705
5706 return rc;
5707}
5708
5709/*===========================================================================
5710 * FUNCTION : dump
5711 *
 5712 * DESCRIPTION: Dumps HAL3 pending request, pending buffer and frame-drop state
 5713 *              to the provided file descriptor
 5714 *
 5715 * PARAMETERS :
 5716 *   @fd : file descriptor to write the dump output to
 5717 *
 * RETURN : None
5718 *==========================================================================*/
5719void QCamera3HardwareInterface::dump(int fd)
5720{
5721 pthread_mutex_lock(&mMutex);
5722 dprintf(fd, "\n Camera HAL3 information Begin \n");
5723
5724 dprintf(fd, "\nNumber of pending requests: %zu \n",
5725 mPendingRequestsList.size());
5726 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5727 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5728 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5729 for(pendingRequestIterator i = mPendingRequestsList.begin();
5730 i != mPendingRequestsList.end(); i++) {
5731 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5732 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5733 i->input_buffer);
5734 }
5735 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5736 mPendingBuffersMap.get_num_overall_buffers());
5737 dprintf(fd, "-------+------------------\n");
5738 dprintf(fd, " Frame | Stream type mask \n");
5739 dprintf(fd, "-------+------------------\n");
5740 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5741 for(auto &j : req.mPendingBufferList) {
5742 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5743 dprintf(fd, " %5d | %11d \n",
5744 req.frame_number, channel->getStreamTypeMask());
5745 }
5746 }
5747 dprintf(fd, "-------+------------------\n");
5748
5749 dprintf(fd, "\nPending frame drop list: %zu\n",
5750 mPendingFrameDropList.size());
5751 dprintf(fd, "-------+-----------\n");
5752 dprintf(fd, " Frame | Stream ID \n");
5753 dprintf(fd, "-------+-----------\n");
5754 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5755 i != mPendingFrameDropList.end(); i++) {
5756 dprintf(fd, " %5d | %9d \n",
5757 i->frame_number, i->stream_ID);
5758 }
5759 dprintf(fd, "-------+-----------\n");
5760
5761 dprintf(fd, "\n Camera HAL3 information End \n");
5762
5763 /* use dumpsys media.camera as trigger to send update debug level event */
5764 mUpdateDebugLevel = true;
5765 pthread_mutex_unlock(&mMutex);
5766 return;
5767}
5768
5769/*===========================================================================
5770 * FUNCTION : flush
5771 *
5772 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5773 * conditionally restarts channels
5774 *
5775 * PARAMETERS :
5776 * @ restartChannels: re-start all channels
5777 *
5778 *
5779 * RETURN :
5780 * 0 on success
5781 * Error code on failure
5782 *==========================================================================*/
5783int QCamera3HardwareInterface::flush(bool restartChannels)
5784{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005785 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005786 int32_t rc = NO_ERROR;
5787
5788 LOGD("Unblocking Process Capture Request");
5789 pthread_mutex_lock(&mMutex);
5790 mFlush = true;
5791 pthread_mutex_unlock(&mMutex);
5792
5793 rc = stopAllChannels();
 5794    // Unlink the dual camera session, if the devices are linked
5795 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005796 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5797 &m_pDualCamCmdPtr->bundle_info;
5798 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005799 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5800 pthread_mutex_lock(&gCamLock);
5801
5802 if (mIsMainCamera == 1) {
5803 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5804 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005805 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005806 // related session id should be session id of linked session
5807 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5808 } else {
5809 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5810 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005811 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005812 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5813 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005814 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005815 pthread_mutex_unlock(&gCamLock);
5816
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005817 rc = mCameraHandle->ops->set_dual_cam_cmd(
5818 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005819 if (rc < 0) {
5820 LOGE("Dualcam: Unlink failed, but still proceed to close");
5821 }
5822 }
5823
5824 if (rc < 0) {
5825 LOGE("stopAllChannels failed");
5826 return rc;
5827 }
5828 if (mChannelHandle) {
5829 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5830 mChannelHandle);
5831 }
5832
5833 // Reset bundle info
5834 rc = setBundleInfo();
5835 if (rc < 0) {
5836 LOGE("setBundleInfo failed %d", rc);
5837 return rc;
5838 }
5839
5840 // Mutex Lock
5841 pthread_mutex_lock(&mMutex);
5842
5843 // Unblock process_capture_request
5844 mPendingLiveRequest = 0;
5845 pthread_cond_signal(&mRequestCond);
5846
5847 rc = notifyErrorForPendingRequests();
5848 if (rc < 0) {
5849 LOGE("notifyErrorForPendingRequests failed");
5850 pthread_mutex_unlock(&mMutex);
5851 return rc;
5852 }
5853
5854 mFlush = false;
5855
5856 // Start the Streams/Channels
5857 if (restartChannels) {
5858 rc = startAllChannels();
5859 if (rc < 0) {
5860 LOGE("startAllChannels failed");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005864 if (mChannelHandle) {
5865 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5866 mChannelHandle);
5867 if (rc < 0) {
5868 LOGE("start_channel failed");
5869 pthread_mutex_unlock(&mMutex);
5870 return rc;
5871 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005872 }
5873 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005874 pthread_mutex_unlock(&mMutex);
5875
5876 return 0;
5877}
5878
5879/*===========================================================================
5880 * FUNCTION : flushPerf
5881 *
 5882 * DESCRIPTION: Performance-optimized version of flush that does not use
 5883 *              stream off; instead it flushes the backend and waits for all
 *              pending buffers to be returned
5884 *
5885 * PARAMETERS :
5886 *
5887 *
5888 * RETURN : 0 : success
5889 * -EINVAL: input is malformed (device is not valid)
5890 * -ENODEV: if the device has encountered a serious error
5891 *==========================================================================*/
5892int QCamera3HardwareInterface::flushPerf()
5893{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005894 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005895 int32_t rc = 0;
5896 struct timespec timeout;
5897 bool timed_wait = false;
5898
5899 pthread_mutex_lock(&mMutex);
5900 mFlushPerf = true;
5901 mPendingBuffersMap.numPendingBufsAtFlush =
5902 mPendingBuffersMap.get_num_overall_buffers();
5903 LOGD("Calling flush. Wait for %d buffers to return",
5904 mPendingBuffersMap.numPendingBufsAtFlush);
5905
5906 /* send the flush event to the backend */
5907 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5908 if (rc < 0) {
5909 LOGE("Error in flush: IOCTL failure");
5910 mFlushPerf = false;
5911 pthread_mutex_unlock(&mMutex);
5912 return -ENODEV;
5913 }
5914
5915 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5916 LOGD("No pending buffers in HAL, return flush");
5917 mFlushPerf = false;
5918 pthread_mutex_unlock(&mMutex);
5919 return rc;
5920 }
5921
5922 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005923 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005924 if (rc < 0) {
5925 LOGE("Error reading the real time clock, cannot use timed wait");
5926 } else {
5927 timeout.tv_sec += FLUSH_TIMEOUT;
5928 timed_wait = true;
5929 }
5930
5931 //Block on conditional variable
5932 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5933 LOGD("Waiting on mBuffersCond");
5934 if (!timed_wait) {
5935 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5936 if (rc != 0) {
5937 LOGE("pthread_cond_wait failed due to rc = %s",
5938 strerror(rc));
5939 break;
5940 }
5941 } else {
5942 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5943 if (rc != 0) {
5944 LOGE("pthread_cond_timedwait failed due to rc = %s",
5945 strerror(rc));
5946 break;
5947 }
5948 }
5949 }
5950 if (rc != 0) {
5951 mFlushPerf = false;
5952 pthread_mutex_unlock(&mMutex);
5953 return -ENODEV;
5954 }
5955
5956 LOGD("Received buffers, now safe to return them");
5957
5958 //make sure the channels handle flush
5959 //currently only required for the picture channel to release snapshot resources
5960 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5961 it != mStreamInfo.end(); it++) {
5962 QCamera3Channel *channel = (*it)->channel;
5963 if (channel) {
5964 rc = channel->flush();
5965 if (rc) {
5966 LOGE("Flushing the channels failed with error %d", rc);
 5967                // Even though the channel flush failed, we still need to continue and
 5968                // return the buffers we have to the framework; the overall return
 5969                // value will be an error
5970 rc = -ENODEV;
5971 }
5972 }
5973 }
5974
5975 /* notify the frameworks and send errored results */
5976 rc = notifyErrorForPendingRequests();
5977 if (rc < 0) {
5978 LOGE("notifyErrorForPendingRequests failed");
5979 pthread_mutex_unlock(&mMutex);
5980 return rc;
5981 }
5982
5983 //unblock process_capture_request
5984 mPendingLiveRequest = 0;
5985 unblockRequestIfNecessary();
5986
5987 mFlushPerf = false;
5988 pthread_mutex_unlock(&mMutex);
5989 LOGD ("Flush Operation complete. rc = %d", rc);
5990 return rc;
5991}
5992
5993/*===========================================================================
5994 * FUNCTION : handleCameraDeviceError
5995 *
5996 * DESCRIPTION: This function calls internal flush and notifies the error to
5997 * framework and updates the state variable.
5998 *
5999 * PARAMETERS : None
6000 *
6001 * RETURN : NO_ERROR on Success
6002 * Error code on failure
6003 *==========================================================================*/
6004int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6005{
6006 int32_t rc = NO_ERROR;
6007
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006008 {
6009 Mutex::Autolock lock(mFlushLock);
6010 pthread_mutex_lock(&mMutex);
6011 if (mState != ERROR) {
6012 //if mState != ERROR, nothing to be done
6013 pthread_mutex_unlock(&mMutex);
6014 return NO_ERROR;
6015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006016 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006017
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006018 rc = flush(false /* restart channels */);
6019 if (NO_ERROR != rc) {
6020 LOGE("internal flush to handle mState = ERROR failed");
6021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006022
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006023 pthread_mutex_lock(&mMutex);
6024 mState = DEINIT;
6025 pthread_mutex_unlock(&mMutex);
6026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006027
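    // Notify the framework of the fatal device error so that it can close the camera
    // device.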
6028 camera3_notify_msg_t notify_msg;
6029 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6030 notify_msg.type = CAMERA3_MSG_ERROR;
6031 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6032 notify_msg.message.error.error_stream = NULL;
6033 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006034 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006035
6036 return rc;
6037}
6038
6039/*===========================================================================
6040 * FUNCTION : captureResultCb
6041 *
6042 * DESCRIPTION: Callback handler for all capture result
6043 * (streams, as well as metadata)
6044 *
6045 * PARAMETERS :
 6046 * @metadata_buf : metadata information
 6047 * @buffer : actual gralloc buffer to be returned to frameworks.
 6048 * NULL if metadata.
 * @frame_number : frame number of the request
 * @isInputBuffer : whether this callback is for an input buffer
6049 *
6050 * RETURN : NONE
6051 *==========================================================================*/
6052void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6053 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6054{
6055 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006056 pthread_mutex_lock(&mMutex);
6057 uint8_t batchSize = mBatchSize;
6058 pthread_mutex_unlock(&mMutex);
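        // Metadata for HFR batches is routed through handleBatchMetadata(); other
        // metadata is handled directly under mMutex.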
6059 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006060 handleBatchMetadata(metadata_buf,
6061 true /* free_and_bufdone_meta_buf */);
6062 } else { /* mBatchSize = 0 */
6063 hdrPlusPerfLock(metadata_buf);
6064 pthread_mutex_lock(&mMutex);
6065 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006066 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006067 false /* first frame of batch metadata */ ,
6068 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006069 pthread_mutex_unlock(&mMutex);
6070 }
6071 } else if (isInputBuffer) {
6072 pthread_mutex_lock(&mMutex);
6073 handleInputBufferWithLock(frame_number);
6074 pthread_mutex_unlock(&mMutex);
6075 } else {
6076 pthread_mutex_lock(&mMutex);
6077 handleBufferWithLock(buffer, frame_number);
6078 pthread_mutex_unlock(&mMutex);
6079 }
6080 return;
6081}
6082
6083/*===========================================================================
6084 * FUNCTION : getReprocessibleOutputStreamId
6085 *
6086 * DESCRIPTION: Get source output stream id for the input reprocess stream
6087 * based on size and format, which would be the largest
6088 * output stream if an input stream exists.
6089 *
6090 * PARAMETERS :
6091 * @id : return the stream id if found
6092 *
6093 * RETURN : int32_t type of status
6094 * NO_ERROR -- success
 6095 * non-zero failure code
6096 *==========================================================================*/
6097int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6098{
 6099    /* Check whether any output or bidirectional stream with the same size and
 6100       format exists, and return that stream */
6101 if ((mInputStreamInfo.dim.width > 0) &&
6102 (mInputStreamInfo.dim.height > 0)) {
6103 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6104 it != mStreamInfo.end(); it++) {
6105
6106 camera3_stream_t *stream = (*it)->stream;
6107 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6108 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6109 (stream->format == mInputStreamInfo.format)) {
6110 // Usage flag for an input stream and the source output stream
6111 // may be different.
6112 LOGD("Found reprocessible output stream! %p", *it);
6113 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6114 stream->usage, mInputStreamInfo.usage);
6115
6116 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6117 if (channel != NULL && channel->mStreams[0]) {
6118 id = channel->mStreams[0]->getMyServerID();
6119 return NO_ERROR;
6120 }
6121 }
6122 }
6123 } else {
6124 LOGD("No input stream, so no reprocessible output stream");
6125 }
6126 return NAME_NOT_FOUND;
6127}
6128
6129/*===========================================================================
6130 * FUNCTION : lookupFwkName
6131 *
 6132 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6133 *              make sure the parameter is correctly propagated
6134 *
6135 * PARAMETERS :
6136 * @arr : map between the two enums
6137 * @len : len of the map
6138 * @hal_name : name of the hal_parm to map
6139 *
6140 * RETURN : int type of status
6141 * fwk_name -- success
 6142 * non-zero failure code
6143 *==========================================================================*/
6144template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6145 size_t len, halType hal_name)
6146{
6147
6148 for (size_t i = 0; i < len; i++) {
6149 if (arr[i].hal_name == hal_name) {
6150 return arr[i].fwk_name;
6151 }
6152 }
6153
 6154    /* Not being able to find a matching framework type is not necessarily
 6155     * an error case. This happens when mm-camera supports more attributes
 6156     * than the framework does */
6157 LOGH("Cannot find matching framework type");
6158 return NAME_NOT_FOUND;
6159}
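// For example, translateFromHalMetadata() below maps backend enums with calls such as
// lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
// NAME_NOT_FOUND simply means the backend value has no framework equivalent.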
6160
6161/*===========================================================================
6162 * FUNCTION : lookupHalName
6163 *
 6164 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6165 *              make sure the parameter is correctly propagated
6166 *
6167 * PARAMETERS :
6168 * @arr : map between the two enums
6169 * @len : len of the map
 6170 * @fwk_name : framework enum value to map
6171 *
6172 * RETURN : int32_t type of status
6173 * hal_name -- success
 6174 * non-zero failure code
6175 *==========================================================================*/
6176template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6177 size_t len, fwkType fwk_name)
6178{
6179 for (size_t i = 0; i < len; i++) {
6180 if (arr[i].fwk_name == fwk_name) {
6181 return arr[i].hal_name;
6182 }
6183 }
6184
6185 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6186 return NAME_NOT_FOUND;
6187}
6188
6189/*===========================================================================
6190 * FUNCTION : lookupProp
6191 *
6192 * DESCRIPTION: lookup a value by its name
6193 *
6194 * PARAMETERS :
6195 * @arr : map between the two enums
6196 * @len : size of the map
6197 * @name : name to be looked up
6198 *
6199 * RETURN : Value if found
6200 * CAM_CDS_MODE_MAX if not found
6201 *==========================================================================*/
6202template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6203 size_t len, const char *name)
6204{
6205 if (name) {
6206 for (size_t i = 0; i < len; i++) {
6207 if (!strcmp(arr[i].desc, name)) {
6208 return arr[i].val;
6209 }
6210 }
6211 }
6212 return CAM_CDS_MODE_MAX;
6213}
6214
6215/*===========================================================================
 6216 * FUNCTION   : translateFromHalMetadata
 6217 *
 6218 * DESCRIPTION: Translates metadata from the HAL/backend format into the
 *              framework CameraMetadata format
 *
6219 * PARAMETERS :
6220 * @metadata : metadata information from callback
6221 * @timestamp: metadata buffer timestamp
6222 * @request_id: request id
6223 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006224 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006225 * @DevCamDebug_meta_enable: enable DevCamDebug meta
 6226 * @pipeline_depth: pipeline depth of this request
 * @capture_intent: capture intent of this request
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 * @pprocDone: whether internal offline postprocessing is done
 * @fwk_cacMode: color aberration correction mode requested by the framework
 * @firstMetadataInBatch: whether this is the first metadata in an HFR batch
6228 *
6229 * RETURN : camera_metadata_t*
6230 * metadata in a format specified by fwk
6231 *==========================================================================*/
6232camera_metadata_t*
6233QCamera3HardwareInterface::translateFromHalMetadata(
6234 metadata_buffer_t *metadata,
6235 nsecs_t timestamp,
6236 int32_t request_id,
6237 const CameraMetadata& jpegMetadata,
6238 uint8_t pipeline_depth,
6239 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006240 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006241 /* DevCamDebug metadata translateFromHalMetadata argument */
6242 uint8_t DevCamDebug_meta_enable,
6243 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006244 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006245 uint8_t fwk_cacMode,
6246 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006247{
6248 CameraMetadata camMetadata;
6249 camera_metadata_t *resultMetadata;
6250
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006251 if (mBatchSize && !firstMetadataInBatch) {
6252 /* In batch mode, use cached metadata from the first metadata
6253 in the batch */
6254 camMetadata.clear();
6255 camMetadata = mCachedMetadata;
6256 }
6257
Thierry Strudel3d639192016-09-09 11:52:26 -07006258 if (jpegMetadata.entryCount())
6259 camMetadata.append(jpegMetadata);
6260
6261 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6262 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6263 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6264 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006265 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006266 if (mBatchSize == 0) {
6267 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6268 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6269 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006270
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006271 if (mBatchSize && !firstMetadataInBatch) {
6272 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6273 resultMetadata = camMetadata.release();
6274 return resultMetadata;
6275 }
6276
Samuel Ha68ba5172016-12-15 18:41:12 -08006277 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6278 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6279 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6280 // DevCamDebug metadata translateFromHalMetadata AF
6281 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6282 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6283 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6284 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6285 }
6286 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6287 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6288 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6289 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6290 }
6291 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6292 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6293 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6294 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6295 }
6296 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6297 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6298 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6299 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6300 }
6301 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6302 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6303 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6304 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6305 }
6306 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6307 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6308 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6309 *DevCamDebug_af_monitor_pdaf_target_pos;
6310 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6311 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6312 }
6313 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6314 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6315 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6316 *DevCamDebug_af_monitor_pdaf_confidence;
6317 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6318 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6319 }
6320 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6321 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6322 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6323 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6324 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6325 }
6326 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6327 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6328 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6329 *DevCamDebug_af_monitor_tof_target_pos;
6330 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6331 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6332 }
6333 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6334 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6335 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6336 *DevCamDebug_af_monitor_tof_confidence;
6337 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6338 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6339 }
6340 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6341 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6342 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6343 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6344 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6345 }
6346 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6347 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6348 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6349 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6350 &fwk_DevCamDebug_af_monitor_type_select, 1);
6351 }
6352 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6353 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6354 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6355 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6356 &fwk_DevCamDebug_af_monitor_refocus, 1);
6357 }
6358 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6359 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6360 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6361 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6362 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6363 }
6364 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6365 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6366 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6367 *DevCamDebug_af_search_pdaf_target_pos;
6368 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6369 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6370 }
6371 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6372 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6373 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6374 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6375 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6376 }
6377 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6378 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6379 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6380 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6381 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6382 }
6383 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6384 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6385 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6386 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6387 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6388 }
6389 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6390 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6391 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6392 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6393 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6394 }
6395 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6396 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6397 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6398 *DevCamDebug_af_search_tof_target_pos;
6399 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6400 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6401 }
6402 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6403 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6404 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6405 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6406 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6407 }
6408 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6409 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6410 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6411 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6412 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6413 }
6414 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6415 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6416 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6417 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6418 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6419 }
6420 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6421 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6422 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6423 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6424 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6425 }
6426 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6427 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6428 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6429 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6430 &fwk_DevCamDebug_af_search_type_select, 1);
6431 }
6432 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6433 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6434 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6435 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6436 &fwk_DevCamDebug_af_search_next_pos, 1);
6437 }
6438 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6439 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6440 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6441 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6442 &fwk_DevCamDebug_af_search_target_pos, 1);
6443 }
6444 // DevCamDebug metadata translateFromHalMetadata AEC
6445 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6446 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6447 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6448 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6449 }
6450 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6451 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6452 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6453 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6454 }
6455 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6456 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6457 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6458 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6459 }
6460 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6461 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6462 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6463 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6464 }
6465 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6466 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6467 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6468 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6469 }
6470 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6471 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6472 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6473 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6474 }
6475 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6476 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6477 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6478 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6479 }
6480 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6481 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6482 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6483 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6484 }
Samuel Ha34229982017-02-17 13:51:11 -08006485 // DevCamDebug metadata translateFromHalMetadata zzHDR
6486 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6487 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6488 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6489 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6490 }
6491 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6492 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6493 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6494 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6495 }
6496 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6497 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6498 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6499 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6500 }
6501 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6502 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6503 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6504 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6505 }
6506 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6507 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6508 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6509 *DevCamDebug_aec_hdr_sensitivity_ratio;
6510 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6511 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6512 }
6513 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6514 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6515 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6516 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6517 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6518 }
6519 // DevCamDebug metadata translateFromHalMetadata ADRC
6520 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6521 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6522 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6523 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6524 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6525 }
6526 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6527 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6528 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6529 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6530 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6531 }
6532 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6533 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6534 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6535 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6536 }
6537 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6538 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6539 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6540 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6541 }
6542 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6543 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6544 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6545 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6546 }
6547 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6548 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6549 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6550 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6551 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006552 // DevCamDebug metadata translateFromHalMetadata AWB
6553 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6554 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6555 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6556 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6557 }
6558 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6559 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6560 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6561 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6562 }
6563 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6564 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6565 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6566 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6567 }
6568 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6569 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6570 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6571 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6572 }
6573 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6574 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6575 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6576 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6577 }
6578 }
6579 // atrace_end(ATRACE_TAG_ALWAYS);
6580
Thierry Strudel3d639192016-09-09 11:52:26 -07006581 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6582 int64_t fwk_frame_number = *frame_number;
6583 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6584 }
6585
6586 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6587 int32_t fps_range[2];
6588 fps_range[0] = (int32_t)float_range->min_fps;
6589 fps_range[1] = (int32_t)float_range->max_fps;
6590 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6591 fps_range, 2);
6592 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6593 fps_range[0], fps_range[1]);
6594 }
6595
6596 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6597 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6598 }
6599
6600 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6601 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6602 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6603 *sceneMode);
6604 if (NAME_NOT_FOUND != val) {
6605 uint8_t fwkSceneMode = (uint8_t)val;
6606 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6607 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6608 fwkSceneMode);
6609 }
6610 }
6611
6612 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6613 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6614 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6615 }
6616
6617 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6618 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6619 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6620 }
6621
6622 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6623 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6624 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6625 }
6626
6627 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6628 CAM_INTF_META_EDGE_MODE, metadata) {
6629 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6630 }
6631
6632 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6633 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6634 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6635 }
6636
6637 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6638 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6639 }
6640
6641 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6642 if (0 <= *flashState) {
6643 uint8_t fwk_flashState = (uint8_t) *flashState;
6644 if (!gCamCapability[mCameraId]->flash_available) {
6645 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6646 }
6647 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6648 }
6649 }
6650
6651 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6652 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6653 if (NAME_NOT_FOUND != val) {
6654 uint8_t fwk_flashMode = (uint8_t)val;
6655 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6656 }
6657 }
6658
6659 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6660 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6661 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6662 }
6663
6664 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6665 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6666 }
6667
6668 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6669 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6670 }
6671
6672 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6673 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6674 }
6675
6676 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6677 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6678 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6679 }
6680
6681 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6682 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6683 LOGD("fwk_videoStab = %d", fwk_videoStab);
6684 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6685 } else {
 6686        // Regardless of whether video stabilization is supported, CTS expects the EIS
 6687        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6688 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6689 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006690 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006691 }
6692
6693 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6694 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6695 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6696 }
6697
6698 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6699 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6700 }
6701
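    // The applied dynamic black level is published as-is through the QTI vendor tag and,
    // when not building for HAL 3.3 (USE_HAL_3_3 undefined), also through
    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL after conversion to the 10-bit sensor raw depth.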
6702    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6703            CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
6704        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
6705
6706        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6707                gCamCapability[mCameraId]->color_arrangement);
6708
6709        LOGD("applied dynamic blackLevel in RGGB order = %f %f %f %f",
6710                blackLevelAppliedPattern->cam_black_level[0],
6711                blackLevelAppliedPattern->cam_black_level[1],
6712                blackLevelAppliedPattern->cam_black_level[2],
6713                blackLevelAppliedPattern->cam_black_level[3]);
6714        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6715                BLACK_LEVEL_PATTERN_CNT);
6716
6717#ifndef USE_HAL_3_3
6718        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6719        // Need to convert from the internal 12-bit depth to the 10-bit sensor raw
6720        // depth space.
6721        fwk_blackLevelInd[0] /= 4.0;
6722        fwk_blackLevelInd[1] /= 4.0;
6723        fwk_blackLevelInd[2] /= 4.0;
6724        fwk_blackLevelInd[3] /= 4.0;
6725        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6726                BLACK_LEVEL_PATTERN_CNT);
6727#endif
6728    }
6729
6730#ifndef USE_HAL_3_3
6731    // Fixed whitelevel is used by ISP/Sensor
6732    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6733            &gCamCapability[mCameraId]->white_level, 1);
6734#endif
6735
6736 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6737 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6738 int32_t scalerCropRegion[4];
6739 scalerCropRegion[0] = hScalerCropRegion->left;
6740 scalerCropRegion[1] = hScalerCropRegion->top;
6741 scalerCropRegion[2] = hScalerCropRegion->width;
6742 scalerCropRegion[3] = hScalerCropRegion->height;
6743
6744 // Adjust crop region from sensor output coordinate system to active
6745 // array coordinate system.
6746 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6747 scalerCropRegion[2], scalerCropRegion[3]);
6748
6749 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6750 }
6751
6752 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6753 LOGD("sensorExpTime = %lld", *sensorExpTime);
6754 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6755 }
6756
6757    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6758            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6759        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6760        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6761 }
6762
6763 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6764 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6765 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6766 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6767 sensorRollingShutterSkew, 1);
6768 }
6769
6770 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6771 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6772 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6773
6774 //calculate the noise profile based on sensitivity
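        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel, modeling
        // the noise variance of a pixel as S * signal + O; since the model here depends only
        // on sensitivity, the same pair is replicated for every channel.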
6775 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6776 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6777 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6778 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6779 noise_profile[i] = noise_profile_S;
6780 noise_profile[i+1] = noise_profile_O;
6781 }
6782 LOGD("noise model entry (S, O) is (%f, %f)",
6783 noise_profile_S, noise_profile_O);
6784 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6785 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6786 }
6787
6788#ifndef USE_HAL_3_3
6789    int32_t fwk_ispSensitivity = 100;
6790    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
6791        fwk_ispSensitivity = (int32_t) *ispSensitivity;
6792    }
6793    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6794        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6795    }
6796    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
6797#endif
6798
6799    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6800 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6801 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6802 }
6803
6804 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6805 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6806 *faceDetectMode);
6807 if (NAME_NOT_FOUND != val) {
6808 uint8_t fwk_faceDetectMode = (uint8_t)val;
6809 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6810
6811 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6812 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6813 CAM_INTF_META_FACE_DETECTION, metadata) {
6814 uint8_t numFaces = MIN(
6815 faceDetectionInfo->num_faces_detected, MAX_ROI);
6816 int32_t faceIds[MAX_ROI];
6817 uint8_t faceScores[MAX_ROI];
6818 int32_t faceRectangles[MAX_ROI * 4];
6819 int32_t faceLandmarks[MAX_ROI * 6];
6820 size_t j = 0, k = 0;
6821
6822 for (size_t i = 0; i < numFaces; i++) {
6823 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6824 // Adjust crop region from sensor output coordinate system to active
6825 // array coordinate system.
6826 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6827 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6828 rect.width, rect.height);
6829
6830 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6831 faceRectangles+j, -1);
6832
6833 j+= 4;
6834 }
6835 if (numFaces <= 0) {
6836 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6837 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6838 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6839 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6840 }
6841
6842 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6843 numFaces);
6844 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6845 faceRectangles, numFaces * 4U);
6846 if (fwk_faceDetectMode ==
6847 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6848 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6849 CAM_INTF_META_FACE_LANDMARK, metadata) {
6850
6851 for (size_t i = 0; i < numFaces; i++) {
6852 // Map the co-ordinate sensor output coordinate system to active
6853 // array coordinate system.
6854 mCropRegionMapper.toActiveArray(
6855 landmarks->face_landmarks[i].left_eye_center.x,
6856 landmarks->face_landmarks[i].left_eye_center.y);
6857 mCropRegionMapper.toActiveArray(
6858 landmarks->face_landmarks[i].right_eye_center.x,
6859 landmarks->face_landmarks[i].right_eye_center.y);
6860 mCropRegionMapper.toActiveArray(
6861 landmarks->face_landmarks[i].mouth_center.x,
6862 landmarks->face_landmarks[i].mouth_center.y);
6863
6864 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
6865                            k+= TOTAL_LANDMARK_INDICES;
6866 }
6867 } else {
6868 for (size_t i = 0; i < numFaces; i++) {
6869 setInvalidLandmarks(faceLandmarks+k);
6870 k+= TOTAL_LANDMARK_INDICES;
6871                        }
6872 }
6873
6874 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6875 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6876 faceLandmarks, numFaces * 6U);
6877 }
6878                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6879 CAM_INTF_META_FACE_BLINK, metadata) {
6880 uint8_t detected[MAX_ROI];
6881 uint8_t degree[MAX_ROI * 2];
6882 for (size_t i = 0; i < numFaces; i++) {
6883 detected[i] = blinks->blink[i].blink_detected;
6884 degree[2 * i] = blinks->blink[i].left_blink;
6885 degree[2 * i + 1] = blinks->blink[i].right_blink;
6886 }
6887 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6888 detected, numFaces);
6889 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6890 degree, numFaces * 2);
6891 }
6892 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6893 CAM_INTF_META_FACE_SMILE, metadata) {
6894 uint8_t degree[MAX_ROI];
6895 uint8_t confidence[MAX_ROI];
6896 for (size_t i = 0; i < numFaces; i++) {
6897 degree[i] = smiles->smile[i].smile_degree;
6898 confidence[i] = smiles->smile[i].smile_confidence;
6899 }
6900 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6901 degree, numFaces);
6902 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6903 confidence, numFaces);
6904 }
6905 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6906 CAM_INTF_META_FACE_GAZE, metadata) {
6907 int8_t angle[MAX_ROI];
6908 int32_t direction[MAX_ROI * 3];
6909 int8_t degree[MAX_ROI * 2];
6910 for (size_t i = 0; i < numFaces; i++) {
6911 angle[i] = gazes->gaze[i].gaze_angle;
6912 direction[3 * i] = gazes->gaze[i].updown_dir;
6913 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6914 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6915 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6916 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6917 }
6918 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6919 (uint8_t *)angle, numFaces);
6920 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6921 direction, numFaces * 3);
6922 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6923 (uint8_t *)degree, numFaces * 2);
6924 }
6925                }
6926 }
6927 }
6928 }
6929
6930 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6931 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6932        camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
6933
6934        if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
6935            IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6936 // process histogram statistics info
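                // QCAMERA3_HISTOGRAM_STATS always carries four channel buffers, packed as
                // R, Gb, Gr, B. When the HAL delivers a single channel (or a YUV histogram),
                // that channel's data is replicated into all four buffers so the payload size
                // stays fixed at 4 * CAM_HISTOGRAM_STATS_SIZE entries.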
6937                uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
6938                uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6939                cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
6940                memset(&rHistData, 0, sizeof(rHistData));
6941                memset(&grHistData, 0, sizeof(grHistData));
6942                memset(&gbHistData, 0, sizeof(gbHistData));
6943                memset(&bHistData, 0, sizeof(bHistData));
6944
6945                switch (stats_data->type) {
6946                case CAM_HISTOGRAM_TYPE_BAYER:
6947                    switch (stats_data->bayer_stats.data_type) {
6948                        case CAM_STATS_CHANNEL_GR:
6949                            rHistData = grHistData = gbHistData = bHistData =
6950                                stats_data->bayer_stats.gr_stats;
6951                            break;
6952                        case CAM_STATS_CHANNEL_GB:
6953                            rHistData = grHistData = gbHistData = bHistData =
6954                                stats_data->bayer_stats.gb_stats;
6955                            break;
6956                        case CAM_STATS_CHANNEL_B:
6957                            rHistData = grHistData = gbHistData = bHistData =
6958                                stats_data->bayer_stats.b_stats;
6959                            break;
6960                        case CAM_STATS_CHANNEL_ALL:
6961                            rHistData = stats_data->bayer_stats.r_stats;
6962                            gbHistData = stats_data->bayer_stats.gb_stats;
6963                            grHistData = stats_data->bayer_stats.gr_stats;
6964                            bHistData = stats_data->bayer_stats.b_stats;
6965                            break;
6966                        case CAM_STATS_CHANNEL_Y:
6967                        case CAM_STATS_CHANNEL_R:
6968                        default:
6969                            rHistData = grHistData = gbHistData = bHistData =
6970                                stats_data->bayer_stats.r_stats;
6971                            break;
6972                    }
6973                    break;
6974                case CAM_HISTOGRAM_TYPE_YUV:
6975                    rHistData = grHistData = gbHistData = bHistData =
6976                        stats_data->yuv_stats;
6977                    break;
6978                }
6979
6980                memcpy(hist_buf, rHistData.hist_buf, hist_size);
6981                memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6982                memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6983                memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
6984
6985                camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
6986            }
6987        }
6988    }
6989
6990 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6991 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6992 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6993 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6994 }
6995
6996 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6997 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6998 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6999 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7000 }
7001
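    // The lens shading map carries four gain factors per grid cell, hence the
    // 4U * map_width * map_height element count; the grid dimensions are clamped to the
    // CAM_MAX_SHADING_MAP limits.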
7002 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7003 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7004 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7005 CAM_MAX_SHADING_MAP_HEIGHT);
7006 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7007 CAM_MAX_SHADING_MAP_WIDTH);
7008 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7009 lensShadingMap->lens_shading, 4U * map_width * map_height);
7010 }
7011
7012 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7013 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7014 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7015 }
7016
7017 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7018 //Populate CAM_INTF_META_TONEMAP_CURVES
7019 /* ch0 = G, ch 1 = B, ch 2 = R*/
7020 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7021 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7022 tonemap->tonemap_points_cnt,
7023 CAM_MAX_TONEMAP_CURVE_SIZE);
7024 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7025 }
7026
7027 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7028 &tonemap->curves[0].tonemap_points[0][0],
7029 tonemap->tonemap_points_cnt * 2);
7030
7031 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7032 &tonemap->curves[1].tonemap_points[0][0],
7033 tonemap->tonemap_points_cnt * 2);
7034
7035 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7036 &tonemap->curves[2].tonemap_points[0][0],
7037 tonemap->tonemap_points_cnt * 2);
7038 }
7039
7040 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7041 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7042 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7043 CC_GAIN_MAX);
7044 }
7045
7046 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7047 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7048 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7049 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7050 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7051 }
7052
7053 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7054 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7055 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7056 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7057 toneCurve->tonemap_points_cnt,
7058 CAM_MAX_TONEMAP_CURVE_SIZE);
7059 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7060 }
7061 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7062 (float*)toneCurve->curve.tonemap_points,
7063 toneCurve->tonemap_points_cnt * 2);
7064 }
7065
7066 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7067 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7068 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7069 predColorCorrectionGains->gains, 4);
7070 }
7071
7072 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7073 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7074 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7075 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7076 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7077 }
7078
7079 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7080 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7081 }
7082
7083 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7084 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7085 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7086 }
7087
7088 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7089 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7090 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7091 }
7092
7093 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7094 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7095 *effectMode);
7096 if (NAME_NOT_FOUND != val) {
7097 uint8_t fwk_effectMode = (uint8_t)val;
7098 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7099 }
7100 }
7101
7102 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7103 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7104 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7105 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7106 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7107 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7108 }
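        // The framework's test pattern data is laid out as [R, Gr, Gb, B]; the HAL's
        // gr/gb fields are swapped for GBRG/BGGR sensors below, presumably so that the
        // two green channels land in the correct slots for those CFA layouts.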
7109 int32_t fwk_testPatternData[4];
7110 fwk_testPatternData[0] = testPatternData->r;
7111 fwk_testPatternData[3] = testPatternData->b;
7112 switch (gCamCapability[mCameraId]->color_arrangement) {
7113 case CAM_FILTER_ARRANGEMENT_RGGB:
7114 case CAM_FILTER_ARRANGEMENT_GRBG:
7115 fwk_testPatternData[1] = testPatternData->gr;
7116 fwk_testPatternData[2] = testPatternData->gb;
7117 break;
7118 case CAM_FILTER_ARRANGEMENT_GBRG:
7119 case CAM_FILTER_ARRANGEMENT_BGGR:
7120 fwk_testPatternData[2] = testPatternData->gr;
7121 fwk_testPatternData[1] = testPatternData->gb;
7122 break;
7123 default:
7124 LOGE("color arrangement %d is not supported",
7125 gCamCapability[mCameraId]->color_arrangement);
7126 break;
7127 }
7128 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7129 }
7130
7131 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7132 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7133 }
7134
7135 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7136 String8 str((const char *)gps_methods);
7137 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7138 }
7139
7140 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7141 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7142 }
7143
7144 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7145 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7146 }
7147
7148 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7149 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7150 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7151 }
7152
7153 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7154 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7155 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7156 }
7157
7158 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7159 int32_t fwk_thumb_size[2];
7160 fwk_thumb_size[0] = thumb_size->width;
7161 fwk_thumb_size[1] = thumb_size->height;
7162 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7163 }
7164
7165 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7166 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7167 privateData,
7168 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7169 }
7170
7171    IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
7172        camMetadata.update(QCAMERA3_EXPOSURE_METER,
7173                meteringMode, 1);
7174    }
7175
7176    IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7177 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7178 LOGD("hdr_scene_data: %d %f\n",
7179 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7180 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7181 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7182 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7183 &isHdr, 1);
7184 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7185 &isHdrConfidence, 1);
7186 }
7187
7188
7189
7190    if (metadata->is_tuning_params_valid) {
7191 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7192 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7193 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7194
7195
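        // Blob layout: a 32-bit version, followed by five 32-bit section sizes
        // (sensor, VFE, CPP, CAC, mod3), followed by the sensor/VFE/CPP/CAC payloads
        // copied back-to-back. dumpMetadataToFile() writes the same layout to disk.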
7196 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7197 sizeof(uint32_t));
7198 data += sizeof(uint32_t);
7199
7200 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7201 sizeof(uint32_t));
7202 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7203 data += sizeof(uint32_t);
7204
7205 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7206 sizeof(uint32_t));
7207 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7208 data += sizeof(uint32_t);
7209
7210 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7211 sizeof(uint32_t));
7212 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7213 data += sizeof(uint32_t);
7214
7215 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7216 sizeof(uint32_t));
7217 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7218 data += sizeof(uint32_t);
7219
7220 metadata->tuning_params.tuning_mod3_data_size = 0;
7221 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7222 sizeof(uint32_t));
7223 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7224 data += sizeof(uint32_t);
7225
7226 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7227 TUNING_SENSOR_DATA_MAX);
7228 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7229 count);
7230 data += count;
7231
7232 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7233 TUNING_VFE_DATA_MAX);
7234 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7235 count);
7236 data += count;
7237
7238 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7239 TUNING_CPP_DATA_MAX);
7240 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7241 count);
7242 data += count;
7243
7244 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7245 TUNING_CAC_DATA_MAX);
7246 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7247 count);
7248 data += count;
7249
7250 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7251 (int32_t *)(void *)tuning_meta_data_blob,
7252 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7253 }
7254
7255 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7256 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7257 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7258 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7259 NEUTRAL_COL_POINTS);
7260 }
7261
7262 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7263 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7264 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7265 }
7266
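    // ANDROID_CONTROL_AE_REGIONS is a (xmin, ymin, xmax, ymax, weight) tuple; the HAL
    // rect is remapped from the sensor-output coordinate space into the active array
    // coordinate space before conversion.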
7267 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7268 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7269 // Adjust crop region from sensor output coordinate system to active
7270 // array coordinate system.
7271 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7272 hAeRegions->rect.width, hAeRegions->rect.height);
7273
7274 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7275 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7276 REGIONS_TUPLE_COUNT);
7277 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7278 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7279 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7280 hAeRegions->rect.height);
7281 }
7282
7283    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7284 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7285 if (NAME_NOT_FOUND != val) {
7286 uint8_t fwkAfMode = (uint8_t)val;
7287 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7288 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7289 } else {
7290 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7291 val);
7292 }
7293 }
7294
7295    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7296        uint8_t fwk_afState = (uint8_t) *afState;
7297        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7298        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7299    }
7300
7301 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7302 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7303 }
7304
7305 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7306 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7307 }
7308
7309 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7310 uint8_t fwk_lensState = *lensState;
7311 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7312 }
7313
7314 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7315 /*af regions*/
7316 int32_t afRegions[REGIONS_TUPLE_COUNT];
7317 // Adjust crop region from sensor output coordinate system to active
7318 // array coordinate system.
7319 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7320 hAfRegions->rect.width, hAfRegions->rect.height);
7321
7322 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7323 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7324 REGIONS_TUPLE_COUNT);
7325 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7326 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7327 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7328 hAfRegions->rect.height);
7329 }
7330
7331 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
7332        uint32_t ab_mode = *hal_ab_mode;
7333 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7334 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7335 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7336 }
7337        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7338                ab_mode);
7339        if (NAME_NOT_FOUND != val) {
7340 uint8_t fwk_ab_mode = (uint8_t)val;
7341 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7342 }
7343 }
7344
7345 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7346 int val = lookupFwkName(SCENE_MODES_MAP,
7347 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7348 if (NAME_NOT_FOUND != val) {
7349 uint8_t fwkBestshotMode = (uint8_t)val;
7350 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7351 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7352 } else {
7353 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7354 }
7355 }
7356
7357 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7358 uint8_t fwk_mode = (uint8_t) *mode;
7359 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7360 }
7361
7362    /* Constant metadata values to be updated */
7363 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7364 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7365
7366 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7367 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7368
7369 int32_t hotPixelMap[2];
7370 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7371
7372 // CDS
7373 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7374 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7375 }
7376
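    // mCurrFeatureState is a bitmask of the post-processing features currently active;
    // the HDR, IR and TNR blocks below compare the incoming metadata against it so that
    // feature toggles are logged (PROFILE_META_*_TOGGLED) only on an actual transition.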
7377    IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7378        int32_t fwk_hdr;
7379        int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
7380        if(*vhdr == CAM_SENSOR_HDR_OFF) {
7381            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7382        } else {
7383            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7384        }
7385
7386        if(fwk_hdr != curr_hdr_state) {
7387            LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7388            if(fwk_hdr)
7389                mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7390            else
7391                mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7392        }
7393        camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7394    }
7395
7396    //binning correction
7397 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7398 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7399 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7400 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7401 }
7402
7403    IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
7404        int32_t fwk_ir = (int32_t) *ir;
7405        int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7406        int8_t is_ir_on = 0;
7407
7408        (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7409        if(is_ir_on != curr_ir_state) {
7410            LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7411            if(is_ir_on)
7412                mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7413            else
7414                mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7415        }
7416        camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
7417    }
7418
7419    // AEC SPEED
7420 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7421 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7422 }
7423
7424 // AWB SPEED
7425 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7426 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7427 }
7428
7429    // TNR
7430 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7431 uint8_t tnr_enable = tnr->denoise_enable;
7432 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7433        int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
7434 int8_t is_tnr_on = 0;
7435
7436 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7437 if(is_tnr_on != curr_tnr_state) {
7438 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7439 if(is_tnr_on)
7440 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7441 else
7442 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7443 }
7444
7445 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7446 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7447 }
7448
7449 // Reprocess crop data
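    // Only the entry matching the reprocessible output stream is published through the
    // QCAMERA3_CROP_*_REPROCESS vendor tags; when the HAL has already reprocessed internally
    // (pprocDone), the full input stream dimensions are reported instead of the daemon crop.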
7450 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7451 uint8_t cnt = crop_data->num_of_streams;
7452 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7453            // mm-qcamera-daemon only posts crop_data for streams
7454            // not linked to pproc, so the absence of valid crop metadata
7455            // is not necessarily an error case.
7456 LOGD("No valid crop metadata entries");
7457 } else {
7458 uint32_t reproc_stream_id;
7459 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7460 LOGD("No reprocessible stream found, ignore crop data");
7461 } else {
7462 int rc = NO_ERROR;
7463 Vector<int32_t> roi_map;
7464 int32_t *crop = new int32_t[cnt*4];
7465 if (NULL == crop) {
7466 rc = NO_MEMORY;
7467 }
7468 if (NO_ERROR == rc) {
7469 int32_t streams_found = 0;
7470 for (size_t i = 0; i < cnt; i++) {
7471 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7472 if (pprocDone) {
7473 // HAL already does internal reprocessing,
7474 // either via reprocessing before JPEG encoding,
7475 // or offline postprocessing for pproc bypass case.
7476 crop[0] = 0;
7477 crop[1] = 0;
7478 crop[2] = mInputStreamInfo.dim.width;
7479 crop[3] = mInputStreamInfo.dim.height;
7480 } else {
7481 crop[0] = crop_data->crop_info[i].crop.left;
7482 crop[1] = crop_data->crop_info[i].crop.top;
7483 crop[2] = crop_data->crop_info[i].crop.width;
7484 crop[3] = crop_data->crop_info[i].crop.height;
7485 }
7486 roi_map.add(crop_data->crop_info[i].roi_map.left);
7487 roi_map.add(crop_data->crop_info[i].roi_map.top);
7488 roi_map.add(crop_data->crop_info[i].roi_map.width);
7489 roi_map.add(crop_data->crop_info[i].roi_map.height);
7490 streams_found++;
7491 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7492 crop[0], crop[1], crop[2], crop[3]);
7493 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7494 crop_data->crop_info[i].roi_map.left,
7495 crop_data->crop_info[i].roi_map.top,
7496 crop_data->crop_info[i].roi_map.width,
7497 crop_data->crop_info[i].roi_map.height);
7498 break;
7499
7500 }
7501 }
7502 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7503 &streams_found, 1);
7504 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7505 crop, (size_t)(streams_found * 4));
7506 if (roi_map.array()) {
7507 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7508 roi_map.array(), roi_map.size());
7509 }
7510 }
7511 if (crop) {
7512 delete [] crop;
7513 }
7514 }
7515 }
7516 }
7517
7518 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7519 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7520 // so hardcoding the CAC result to OFF mode.
7521 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7522 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7523 } else {
7524 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7525 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7526 *cacMode);
7527 if (NAME_NOT_FOUND != val) {
7528 uint8_t resultCacMode = (uint8_t)val;
7529                // Check whether the CAC result from the callback matches the CAC mode set by the
7530                // framework; if not, report the CAC mode that came in the corresponding request.
7531 if (fwk_cacMode != resultCacMode) {
7532 resultCacMode = fwk_cacMode;
7533 }
7534                //Check if CAC is disabled by property
7535 if (m_cacModeDisabled) {
7536 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7537 }
7538
7539                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7540 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7541 } else {
7542 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7543 }
7544 }
7545 }
7546
7547 // Post blob of cam_cds_data through vendor tag.
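    // Only the CDS info for the reprocessible output stream is forwarded; the override blob
    // reports a single stream (num_of_streams = 1) and is always published with the full
    // sizeof(cam_cds_data_t).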
7548 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7549 uint8_t cnt = cdsInfo->num_of_streams;
7550 cam_cds_data_t cdsDataOverride;
7551 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7552 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7553 cdsDataOverride.num_of_streams = 1;
7554 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7555 uint32_t reproc_stream_id;
7556 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7557 LOGD("No reprocessible stream found, ignore cds data");
7558 } else {
7559 for (size_t i = 0; i < cnt; i++) {
7560 if (cdsInfo->cds_info[i].stream_id ==
7561 reproc_stream_id) {
7562 cdsDataOverride.cds_info[0].cds_enable =
7563 cdsInfo->cds_info[i].cds_enable;
7564 break;
7565 }
7566 }
7567 }
7568 } else {
7569 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7570 }
7571 camMetadata.update(QCAMERA3_CDS_INFO,
7572 (uint8_t *)&cdsDataOverride,
7573 sizeof(cam_cds_data_t));
7574 }
7575
7576 // Ldaf calibration data
7577 if (!mLdafCalibExist) {
7578 IF_META_AVAILABLE(uint32_t, ldafCalib,
7579 CAM_INTF_META_LDAF_EXIF, metadata) {
7580 mLdafCalibExist = true;
7581 mLdafCalib[0] = ldafCalib[0];
7582 mLdafCalib[1] = ldafCalib[1];
7583 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7584 ldafCalib[0], ldafCalib[1]);
7585 }
7586 }
7587
7588    // EXIF debug data through vendor tag
7589 /*
7590 * Mobicat Mask can assume 3 values:
7591 * 1 refers to Mobicat data,
7592 * 2 refers to Stats Debug and Exif Debug Data
7593 * 3 refers to Mobicat and Stats Debug Data
7594 * We want to make sure that we are sending Exif debug data
7595 * only when Mobicat Mask is 2.
7596 */
7597 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7598 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7599 (uint8_t *)(void *)mExifParams.debug_params,
7600 sizeof(mm_jpeg_debug_exif_params_t));
7601 }
7602
7603    // Reprocess and DDM debug data through vendor tag
7604    cam_reprocess_info_t repro_info;
7605    memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
7606    IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7607            CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
7608        memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
7609    }
7610    IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7611            CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
7612        memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
7613    }
7614    IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7615            CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
7616        memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
7617    }
7618    IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7619            CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
7620        memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
7621    }
7622    IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7623            CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
7624        memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
7625    }
7626    IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
7627        memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
7628    }
7629    IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7630            CAM_INTF_PARM_ROTATION, metadata) {
7631        memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
7632    }
7633    IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7634        memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7635    }
7636    IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7637        memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7638    }
7639    camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7640            (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
7641
7642    // INSTANT AEC MODE
7643 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7644 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7645 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7646 }
7647
7648    // AF scene change
7649 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7650 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7651 }
7652
7653    /* In batch mode, cache the first metadata in the batch */
7654 if (mBatchSize && firstMetadataInBatch) {
7655 mCachedMetadata.clear();
7656 mCachedMetadata = camMetadata;
7657 }
7658
7659    resultMetadata = camMetadata.release();
7660 return resultMetadata;
7661}
7662
7663/*===========================================================================
7664 * FUNCTION : saveExifParams
7665 *
7666 * DESCRIPTION: Save the 3A/stats EXIF debug parameters from the metadata callback.
7667 *
7668 * PARAMETERS :
7669 * @metadata : metadata information from callback
7670 *
7671 * RETURN : none
7672 *
7673 *==========================================================================*/
7674void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7675{
7676 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7677 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7678 if (mExifParams.debug_params) {
7679 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7680 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7681 }
7682 }
7683 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7684 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7685 if (mExifParams.debug_params) {
7686 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7687 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7688 }
7689 }
7690 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7691 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7692 if (mExifParams.debug_params) {
7693 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7694 mExifParams.debug_params->af_debug_params_valid = TRUE;
7695 }
7696 }
7697 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7698 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7699 if (mExifParams.debug_params) {
7700 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7701 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7702 }
7703 }
7704 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7705 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7706 if (mExifParams.debug_params) {
7707 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7708 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7709 }
7710 }
7711 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7712 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7713 if (mExifParams.debug_params) {
7714 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7715 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7716 }
7717 }
7718 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7719 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7720 if (mExifParams.debug_params) {
7721 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7722 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7723 }
7724 }
7725 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7726 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7727 if (mExifParams.debug_params) {
7728 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7729 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7730 }
7731 }
7732}
7733
7734/*===========================================================================
7735 * FUNCTION : get3AExifParams
7736 *
7737 * DESCRIPTION: Return the cached 3A EXIF parameters.
7738 *
7739 * PARAMETERS : none
7740 *
7741 *
7742 * RETURN : mm_jpeg_exif_params_t
7743 *
7744 *==========================================================================*/
7745mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7746{
7747 return mExifParams;
7748}
7749
7750/*===========================================================================
7751 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7752 *
7753 * DESCRIPTION: Translate urgent (partial) 3A metadata from the HAL callback into framework result metadata.
7754 *
7755 * PARAMETERS :
7756 * @metadata : metadata information from callback
7757 *
7758 * RETURN : camera_metadata_t*
7759 * metadata in a format specified by fwk
7760 *==========================================================================*/
7761camera_metadata_t*
7762QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7763 (metadata_buffer_t *metadata)
7764{
7765 CameraMetadata camMetadata;
7766 camera_metadata_t *resultMetadata;
7767
7768
7769 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7770 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7771 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7772 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7773 }
7774
7775 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7776 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7777 &aecTrigger->trigger, 1);
7778 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7779 &aecTrigger->trigger_id, 1);
7780 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7781 aecTrigger->trigger);
7782 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7783 aecTrigger->trigger_id);
7784 }
7785
7786 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7787 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7788 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7789 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7790 }
7791
7792    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7793 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7794 &af_trigger->trigger, 1);
7795 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7796 af_trigger->trigger);
7797 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7798 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7799 af_trigger->trigger_id);
7800 }
7801
7802 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7803 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7804 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7805 if (NAME_NOT_FOUND != val) {
7806 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7807 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7808 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7809 } else {
7810 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7811 }
7812 }
7813
7814 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7815 uint32_t aeMode = CAM_AE_MODE_MAX;
7816 int32_t flashMode = CAM_FLASH_MODE_MAX;
7817 int32_t redeye = -1;
7818 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7819 aeMode = *pAeMode;
7820 }
7821 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7822 flashMode = *pFlashMode;
7823 }
7824 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7825 redeye = *pRedeye;
7826 }
7827
7828 if (1 == redeye) {
7829 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7830 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7831 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7832 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7833 flashMode);
7834 if (NAME_NOT_FOUND != val) {
7835 fwk_aeMode = (uint8_t)val;
7836 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7837 } else {
7838 LOGE("Unsupported flash mode %d", flashMode);
7839 }
7840 } else if (aeMode == CAM_AE_MODE_ON) {
7841 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7842 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7843 } else if (aeMode == CAM_AE_MODE_OFF) {
7844 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7845 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7846 } else {
7847 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7848 "flashMode:%d, aeMode:%u!!!",
7849 redeye, flashMode, aeMode);
7850 }
7851    if (mInstantAEC) {
7852        // Increment the frame index count until a bound is reached for instant AEC.
7853 mInstantAecFrameIdxCount++;
7854 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7855 CAM_INTF_META_AEC_INFO, metadata) {
7856 LOGH("ae_params->settled = %d",ae_params->settled);
7857            // If AEC has settled, or the number of frames has reached the bound value,
7858            // instant AEC should be reset.
7859 if (ae_params->settled ||
7860 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7861 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7862 mInstantAEC = false;
7863 mResetInstantAEC = true;
7864 mInstantAecFrameIdxCount = 0;
7865 }
7866 }
7867 }
7868    resultMetadata = camMetadata.release();
7869 return resultMetadata;
7870}
7871
7872/*===========================================================================
7873 * FUNCTION : dumpMetadataToFile
7874 *
7875 * DESCRIPTION: Dumps tuning metadata to file system
7876 *
7877 * PARAMETERS :
7878 * @meta : tuning metadata
7879 * @dumpFrameCount : current dump frame count
7880 * @enabled : Enable mask
7881 *
7882 *==========================================================================*/
7883void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7884 uint32_t &dumpFrameCount,
7885 bool enabled,
7886 const char *type,
7887 uint32_t frameNumber)
7888{
7889 //Some sanity checks
7890 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7891 LOGE("Tuning sensor data size bigger than expected %d: %d",
7892 meta.tuning_sensor_data_size,
7893 TUNING_SENSOR_DATA_MAX);
7894 return;
7895 }
7896
7897 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7898 LOGE("Tuning VFE data size bigger than expected %d: %d",
7899 meta.tuning_vfe_data_size,
7900 TUNING_VFE_DATA_MAX);
7901 return;
7902 }
7903
7904 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7905 LOGE("Tuning CPP data size bigger than expected %d: %d",
7906 meta.tuning_cpp_data_size,
7907 TUNING_CPP_DATA_MAX);
7908 return;
7909 }
7910
7911 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7912 LOGE("Tuning CAC data size bigger than expected %d: %d",
7913 meta.tuning_cac_data_size,
7914 TUNING_CAC_DATA_MAX);
7915 return;
7916 }
7917 //
7918
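    // The dump file mirrors the QCAMERA3_TUNING_META_DATA_BLOB layout: a 32-bit version,
    // five 32-bit section sizes (sensor, VFE, CPP, CAC, mod3), then the sensor, VFE, CPP
    // and CAC payload sections written back-to-back.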
7919 if(enabled){
7920 char timeBuf[FILENAME_MAX];
7921 char buf[FILENAME_MAX];
7922 memset(buf, 0, sizeof(buf));
7923 memset(timeBuf, 0, sizeof(timeBuf));
7924 time_t current_time;
7925 struct tm * timeinfo;
7926 time (&current_time);
7927 timeinfo = localtime (&current_time);
7928 if (timeinfo != NULL) {
7929 strftime (timeBuf, sizeof(timeBuf),
7930 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7931 }
7932 String8 filePath(timeBuf);
7933 snprintf(buf,
7934 sizeof(buf),
7935 "%dm_%s_%d.bin",
7936 dumpFrameCount,
7937 type,
7938 frameNumber);
7939 filePath.append(buf);
7940 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7941 if (file_fd >= 0) {
7942 ssize_t written_len = 0;
7943 meta.tuning_data_version = TUNING_DATA_VERSION;
7944 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7945 written_len += write(file_fd, data, sizeof(uint32_t));
7946 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7947 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7948 written_len += write(file_fd, data, sizeof(uint32_t));
7949 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7950 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7951 written_len += write(file_fd, data, sizeof(uint32_t));
7952 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7953 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7954 written_len += write(file_fd, data, sizeof(uint32_t));
7955 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7956 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7957 written_len += write(file_fd, data, sizeof(uint32_t));
7958 meta.tuning_mod3_data_size = 0;
7959 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7960 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7961 written_len += write(file_fd, data, sizeof(uint32_t));
7962 size_t total_size = meta.tuning_sensor_data_size;
7963 data = (void *)((uint8_t *)&meta.data);
7964 written_len += write(file_fd, data, total_size);
7965 total_size = meta.tuning_vfe_data_size;
7966 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7967 written_len += write(file_fd, data, total_size);
7968 total_size = meta.tuning_cpp_data_size;
7969 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7970 written_len += write(file_fd, data, total_size);
7971 total_size = meta.tuning_cac_data_size;
7972 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7973 written_len += write(file_fd, data, total_size);
7974 close(file_fd);
7975 }else {
7976 LOGE("fail to open file for metadata dumping");
7977 }
7978 }
7979}
7980
7981/*===========================================================================
7982 * FUNCTION : cleanAndSortStreamInfo
7983 *
7984 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7985 * and sort them such that raw stream is at the end of the list
7986 * This is a workaround for camera daemon constraint.
7987 *
7988 * PARAMETERS : None
7989 *
7990 *==========================================================================*/
7991void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7992{
7993 List<stream_info_t *> newStreamInfo;
7994
7995 /*clean up invalid streams*/
7996 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7997 it != mStreamInfo.end();) {
7998 if(((*it)->status) == INVALID){
7999 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8000 delete channel;
8001 free(*it);
8002 it = mStreamInfo.erase(it);
8003 } else {
8004 it++;
8005 }
8006 }
8007
8008 // Move preview/video/callback/snapshot streams into newList
8009 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8010 it != mStreamInfo.end();) {
8011 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8012 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8013 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8014 newStreamInfo.push_back(*it);
8015 it = mStreamInfo.erase(it);
8016 } else
8017 it++;
8018 }
8019 // Move raw streams into newList
8020 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8021 it != mStreamInfo.end();) {
8022 newStreamInfo.push_back(*it);
8023 it = mStreamInfo.erase(it);
8024 }
8025
8026 mStreamInfo = newStreamInfo;
8027}
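/* Worked example (illustrative): if mStreamInfo held {RAW16, YUV_420_888,
 * <INVALID preview>, BLOB} in that order, the first pass frees the INVALID
 * entry and its channel, and the reordering yields {YUV_420_888, BLOB, RAW16},
 * i.e. processed streams keep their relative order and the raw stream ends up
 * last, as the camera daemon expects.
 */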
8028
8029/*===========================================================================
8030 * FUNCTION : extractJpegMetadata
8031 *
8032 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8033 * JPEG metadata is cached in HAL, and return as part of capture
8034 * result when metadata is returned from camera daemon.
8035 *
8036 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8037 * @request: capture request
8038 *
8039 *==========================================================================*/
8040void QCamera3HardwareInterface::extractJpegMetadata(
8041 CameraMetadata& jpegMetadata,
8042 const camera3_capture_request_t *request)
8043{
8044 CameraMetadata frame_settings;
8045 frame_settings = request->settings;
8046
8047 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8048 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8049 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8050 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8051
8052 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8053 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8054 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8055 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8056
8057 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8058 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8059 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8060 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8061
8062 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8063 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8064 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8065 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8066
8067 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8068 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8069 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8070 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8071
8072 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8073 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8074 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8075 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8076
8077 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8078 int32_t thumbnail_size[2];
8079 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8080 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8081 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8082 int32_t orientation =
8083 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8084 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8085 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8086 int32_t temp;
8087 temp = thumbnail_size[0];
8088 thumbnail_size[0] = thumbnail_size[1];
8089 thumbnail_size[1] = temp;
8090 }
8091 }
8092 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8093 thumbnail_size,
8094 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8095 }
8096
8097}
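/* Worked example (illustrative): for a request carrying
 * ANDROID_JPEG_ORIENTATION = 90 and ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240},
 * and assuming needJpegExifRotation() returns false, the cached jpegMetadata
 * ends up with THUMBNAIL_SIZE = {240, 320} so the thumbnail matches the
 * rotated main image.
 */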
8098
8099/*===========================================================================
8100 * FUNCTION : convertToRegions
8101 *
8102 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8103 *
8104 * PARAMETERS :
8105 * @rect : cam_rect_t struct to convert
8106 * @region : int32_t destination array
8107 * @weight : if we are converting from cam_area_t, weight is valid
8108 * else weight = -1
8109 *
8110 *==========================================================================*/
8111void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8112 int32_t *region, int weight)
8113{
8114 region[0] = rect.left;
8115 region[1] = rect.top;
8116 region[2] = rect.left + rect.width;
8117 region[3] = rect.top + rect.height;
8118 if (weight > -1) {
8119 region[4] = weight;
8120 }
8121}
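/* Worked example (illustrative): a cam_rect_t of {left=100, top=200,
 * width=300, height=400} with weight 1 becomes the framework-style array
 * {100, 200, 400, 600, 1}, i.e. {xmin, ymin, xmax, ymax, weight}.
 */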
8122
8123/*===========================================================================
8124 * FUNCTION : convertFromRegions
8125 *
8126 * DESCRIPTION: helper method to convert from array to cam_rect_t
8127 *
8128 * PARAMETERS :
8129 * @rect : cam_rect_t struct to convert
8130 * @region : int32_t destination array
8131 * @weight : if we are converting from cam_area_t, weight is valid
8132 * else weight = -1
8133 *
8134 *==========================================================================*/
8135void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8136 const CameraMetadata &frame_settings, uint32_t tag)
8137{
8138 int32_t x_min = frame_settings.find(tag).data.i32[0];
8139 int32_t y_min = frame_settings.find(tag).data.i32[1];
8140 int32_t x_max = frame_settings.find(tag).data.i32[2];
8141 int32_t y_max = frame_settings.find(tag).data.i32[3];
8142 roi.weight = frame_settings.find(tag).data.i32[4];
8143 roi.rect.left = x_min;
8144 roi.rect.top = y_min;
8145 roi.rect.width = x_max - x_min;
8146 roi.rect.height = y_max - y_min;
8147}
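/* Worked example (illustrative): if the tag holds {100, 200, 400, 600, 1},
 * the resulting cam_area_t is rect = {left=100, top=200, width=300,
 * height=400} with weight = 1, i.e. the exact inverse of convertToRegions().
 */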
8148
8149/*===========================================================================
8150 * FUNCTION : resetIfNeededROI
8151 *
8152 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8153 * crop region
8154 *
8155 * PARAMETERS :
8156 * @roi : cam_area_t struct to resize
8157 * @scalerCropRegion : cam_crop_region_t region to compare against
8158 *
8159 *
8160 *==========================================================================*/
8161bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8162 const cam_crop_region_t* scalerCropRegion)
8163{
8164 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8165 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8166 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8167 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8168
8169 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8170 * Without this check, the validation below (whether the roi is inside the
8171 * scaler crop region) would fail, the roi would never be reset, and the
8172 * algorithm would keep using a stale roi window.
8173 */
8174 if (roi->weight == 0) {
8175 return true;
8176 }
8177
8178 if ((roi_x_max < scalerCropRegion->left) ||
8179 // right edge of roi window is left of scalar crop's left edge
8180 (roi_y_max < scalerCropRegion->top) ||
8181 // bottom edge of roi window is above scalar crop's top edge
8182 (roi->rect.left > crop_x_max) ||
8183 // left edge of roi window is beyond(right) of scalar crop's right edge
8184 (roi->rect.top > crop_y_max)){
8185 // top edge of roi window is below scalar crop's bottom edge
8186 return false;
8187 }
8188 if (roi->rect.left < scalerCropRegion->left) {
8189 roi->rect.left = scalerCropRegion->left;
8190 }
8191 if (roi->rect.top < scalerCropRegion->top) {
8192 roi->rect.top = scalerCropRegion->top;
8193 }
8194 if (roi_x_max > crop_x_max) {
8195 roi_x_max = crop_x_max;
8196 }
8197 if (roi_y_max > crop_y_max) {
8198 roi_y_max = crop_y_max;
8199 }
8200 roi->rect.width = roi_x_max - roi->rect.left;
8201 roi->rect.height = roi_y_max - roi->rect.top;
8202 return true;
8203}
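/* Worked example (illustrative): with roi = {left=0, top=0, width=4000,
 * height=3000, weight=1} and scalerCropRegion = {left=100, top=100,
 * width=2000, height=1500}, the two overlap, so the roi is clamped to
 * {left=100, top=100, width=2000, height=1500} and true is returned. A roi
 * with weight = 0 is treated as "disable roi" and returns true without any
 * clamping.
 */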
8204
8205/*===========================================================================
8206 * FUNCTION : convertLandmarks
8207 *
8208 * DESCRIPTION: helper method to extract the landmarks from face detection info
8209 *
8210 * PARAMETERS :
8211 * @landmark_data : input landmark data to be converted
8212 * @landmarks : int32_t destination array
8213 *
8214 *
8215 *==========================================================================*/
8216void QCamera3HardwareInterface::convertLandmarks(
8217 cam_face_landmarks_info_t landmark_data,
8218 int32_t *landmarks)
8219{
8220 if (landmark_data.is_left_eye_valid) {
8221 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8222 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8223 } else {
8224 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8225 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8226 }
8227
8228 if (landmark_data.is_right_eye_valid) {
8229 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8230 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8231 } else {
8232 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8233 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8234 }
8235
8236 if (landmark_data.is_mouth_valid) {
8237 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8238 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8239 } else {
8240 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8241 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8242 }
8243}
8244
8245/*===========================================================================
8246 * FUNCTION : setInvalidLandmarks
8247 *
8248 * DESCRIPTION: helper method to set invalid landmarks
8249 *
8250 * PARAMETERS :
8251 * @landmarks : int32_t destination array
8252 *
8253 *
8254 *==========================================================================*/
8255void QCamera3HardwareInterface::setInvalidLandmarks(
8256 int32_t *landmarks)
8257{
8258 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8259 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8260 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8261 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8262 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8263 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8264}
8265
8266#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8267
8268/*===========================================================================
8269 * FUNCTION : getCapabilities
8270 *
8271 * DESCRIPTION: query camera capability from back-end
8272 *
8273 * PARAMETERS :
8274 * @ops : mm-interface ops structure
8275 * @cam_handle : camera handle for which we need capability
8276 *
8277 * RETURN : ptr type of capability structure
8278 * capability for success
8279 * NULL for failure
8280 *==========================================================================*/
8281cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8282 uint32_t cam_handle)
8283{
8284 int rc = NO_ERROR;
8285 QCamera3HeapMemory *capabilityHeap = NULL;
8286 cam_capability_t *cap_ptr = NULL;
8287
8288 if (ops == NULL) {
8289 LOGE("Invalid arguments");
8290 return NULL;
8291 }
8292
8293 capabilityHeap = new QCamera3HeapMemory(1);
8294 if (capabilityHeap == NULL) {
8295 LOGE("creation of capabilityHeap failed");
8296 return NULL;
8297 }
8298
8299 /* Allocate memory for capability buffer */
8300 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8301 if(rc != OK) {
8302 LOGE("No memory for capability");
8303 goto allocate_failed;
8304 }
8305
8306 /* Map memory for capability buffer */
8307 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8308
8309 rc = ops->map_buf(cam_handle,
8310 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8311 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8312 if(rc < 0) {
8313 LOGE("failed to map capability buffer");
8314 rc = FAILED_TRANSACTION;
8315 goto map_failed;
8316 }
8317
8318 /* Query Capability */
8319 rc = ops->query_capability(cam_handle);
8320 if(rc < 0) {
8321 LOGE("failed to query capability");
8322 rc = FAILED_TRANSACTION;
8323 goto query_failed;
8324 }
8325
8326 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8327 if (cap_ptr == NULL) {
8328 LOGE("out of memory");
8329 rc = NO_MEMORY;
8330 goto query_failed;
8331 }
8332
8333 memset(cap_ptr, 0, sizeof(cam_capability_t));
8334 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8335
8336 int index;
8337 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8338 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8339 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8340 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8341 }
8342
8343query_failed:
8344 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8345map_failed:
8346 capabilityHeap->deallocate();
8347allocate_failed:
8348 delete capabilityHeap;
8349
8350 if (rc != NO_ERROR) {
8351 return NULL;
8352 } else {
8353 return cap_ptr;
8354 }
8355}
8356
8357/*===========================================================================
8358 * FUNCTION : initCapabilities
8359 *
8360 * DESCRIPTION: initialize camera capabilities in static data struct
8361 *
8362 * PARAMETERS :
8363 * @cameraId : camera Id
8364 *
8365 * RETURN : int32_t type of status
8366 * NO_ERROR -- success
8367 * non-zero failure code
8368 *==========================================================================*/
8369int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8370{
8371 int rc = 0;
8372 mm_camera_vtbl_t *cameraHandle = NULL;
8373 uint32_t handle = 0;
8374
8375 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8376 if (rc) {
8377 LOGE("camera_open failed. rc = %d", rc);
8378 goto open_failed;
8379 }
8380 if (!cameraHandle) {
8381 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8382 goto open_failed;
8383 }
8384
8385 handle = get_main_camera_handle(cameraHandle->camera_handle);
8386 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8387 if (gCamCapability[cameraId] == NULL) {
8388 rc = FAILED_TRANSACTION;
8389 goto failed_op;
8390 }
8391
8392 gCamCapability[cameraId]->camera_index = cameraId;
8393 if (is_dual_camera_by_idx(cameraId)) {
8394 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8395 gCamCapability[cameraId]->aux_cam_cap =
8396 getCapabilities(cameraHandle->ops, handle);
8397 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8398 rc = FAILED_TRANSACTION;
8399 free(gCamCapability[cameraId]);
8400 goto failed_op;
8401 }
8402
8403 // Copy the main camera capability to main_cam_cap struct
8404 gCamCapability[cameraId]->main_cam_cap =
8405 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8406 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8407 LOGE("out of memory");
8408 rc = NO_MEMORY;
8409 goto failed_op;
8410 }
8411 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8412 sizeof(cam_capability_t));
8413 }
8414failed_op:
8415 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8416 cameraHandle = NULL;
8417open_failed:
8418 return rc;
8419}
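/* Illustrative outcome (not additional code): for a single-sensor camera id
 * only gCamCapability[id] is populated. For a dual-camera id, it additionally
 * gets aux_cam_cap (queried through the aux handle) and main_cam_cap (a
 * malloc'd copy of the main capability), both filled in before the camera is
 * closed again.
 */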
8420
8421/*==========================================================================
8422 * FUNCTION : get3Aversion
8423 *
8424 * DESCRIPTION: get the Q3A S/W version
8425 *
8426 * PARAMETERS :
8427 * @sw_version: Reference of Q3A structure which will hold version info upon
8428 * return
8429 *
8430 * RETURN : None
8431 *
8432 *==========================================================================*/
8433void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8434{
8435 if(gCamCapability[mCameraId])
8436 sw_version = gCamCapability[mCameraId]->q3a_version;
8437 else
8438 LOGE("Capability structure NULL!");
8439}
8440
8441
8442/*===========================================================================
8443 * FUNCTION : initParameters
8444 *
8445 * DESCRIPTION: initialize camera parameters
8446 *
8447 * PARAMETERS :
8448 *
8449 * RETURN : int32_t type of status
8450 * NO_ERROR -- success
8451 * non-zero failure code
8452 *==========================================================================*/
8453int QCamera3HardwareInterface::initParameters()
8454{
8455 int rc = 0;
8456
8457 //Allocate Set Param Buffer
8458 mParamHeap = new QCamera3HeapMemory(1);
8459 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8460 if(rc != OK) {
8461 rc = NO_MEMORY;
8462 LOGE("Failed to allocate SETPARM Heap memory");
8463 delete mParamHeap;
8464 mParamHeap = NULL;
8465 return rc;
8466 }
8467
8468 //Map memory for parameters buffer
8469 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8470 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8471 mParamHeap->getFd(0),
8472 sizeof(metadata_buffer_t),
8473 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8474 if(rc < 0) {
8475 LOGE("failed to map SETPARM buffer");
8476 rc = FAILED_TRANSACTION;
8477 mParamHeap->deallocate();
8478 delete mParamHeap;
8479 mParamHeap = NULL;
8480 return rc;
8481 }
8482
8483 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8484
8485 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8486 return rc;
8487}
8488
8489/*===========================================================================
8490 * FUNCTION : deinitParameters
8491 *
8492 * DESCRIPTION: de-initialize camera parameters
8493 *
8494 * PARAMETERS :
8495 *
8496 * RETURN : NONE
8497 *==========================================================================*/
8498void QCamera3HardwareInterface::deinitParameters()
8499{
8500 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8501 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8502
8503 mParamHeap->deallocate();
8504 delete mParamHeap;
8505 mParamHeap = NULL;
8506
8507 mParameters = NULL;
8508
8509 free(mPrevParameters);
8510 mPrevParameters = NULL;
8511}
8512
8513/*===========================================================================
8514 * FUNCTION : calcMaxJpegSize
8515 *
8516 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8517 *
8518 * PARAMETERS :
 * @camera_id : camera Id
8519 *
8520 * RETURN : max_jpeg_size
8521 *==========================================================================*/
8522size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8523{
8524 size_t max_jpeg_size = 0;
8525 size_t temp_width, temp_height;
8526 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8527 MAX_SIZES_CNT);
8528 for (size_t i = 0; i < count; i++) {
8529 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8530 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8531 if (temp_width * temp_height > max_jpeg_size ) {
8532 max_jpeg_size = temp_width * temp_height;
8533 }
8534 }
8535 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8536 return max_jpeg_size;
8537}
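/* Worked example (illustrative): for a hypothetical 4000x3000 maximum picture
 * size, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
 * i.e. 18,000,000 bytes plus the blob header. This is a worst-case bound; the
 * encoded JPEG is normally much smaller.
 */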
8538
8539/*===========================================================================
8540 * FUNCTION : getMaxRawSize
8541 *
8542 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8543 *
8544 * PARAMETERS :
 * @camera_id : camera Id
8545 *
8546 * RETURN : Largest supported Raw Dimension
8547 *==========================================================================*/
8548cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8549{
8550 int max_width = 0;
8551 cam_dimension_t maxRawSize;
8552
8553 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8554 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8555 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8556 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8557 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8558 }
8559 }
8560 return maxRawSize;
8561}
8562
8563
8564/*===========================================================================
8565 * FUNCTION : calcMaxJpegDim
8566 *
8567 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8568 *
8569 * PARAMETERS :
8570 *
8571 * RETURN : max_jpeg_dim
8572 *==========================================================================*/
8573cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8574{
8575 cam_dimension_t max_jpeg_dim;
8576 cam_dimension_t curr_jpeg_dim;
8577 max_jpeg_dim.width = 0;
8578 max_jpeg_dim.height = 0;
8579 curr_jpeg_dim.width = 0;
8580 curr_jpeg_dim.height = 0;
8581 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8582 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8583 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8584 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8585 max_jpeg_dim.width * max_jpeg_dim.height ) {
8586 max_jpeg_dim.width = curr_jpeg_dim.width;
8587 max_jpeg_dim.height = curr_jpeg_dim.height;
8588 }
8589 }
8590 return max_jpeg_dim;
8591}
8592
8593/*===========================================================================
8594 * FUNCTION : addStreamConfig
8595 *
8596 * DESCRIPTION: adds the stream configuration to the array
8597 *
8598 * PARAMETERS :
8599 * @available_stream_configs : pointer to stream configuration array
8600 * @scalar_format : scalar format
8601 * @dim : configuration dimension
8602 * @config_type : input or output configuration type
8603 *
8604 * RETURN : NONE
8605 *==========================================================================*/
8606void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8607 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8608{
8609 available_stream_configs.add(scalar_format);
8610 available_stream_configs.add(dim.width);
8611 available_stream_configs.add(dim.height);
8612 available_stream_configs.add(config_type);
8613}
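/* Worked example (illustrative): addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB,
 * {4000, 3000}, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) appends
 * the flat 4-tuple {BLOB, 4000, 3000, OUTPUT}, matching the
 * (format, width, height, direction) layout that
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS expects.
 */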
8614
8615/*===========================================================================
8616 * FUNCTION   : supportBurstCapture
8617 *
8618 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8619 *
8620 * PARAMETERS :
8621 * @cameraId : camera Id
8622 *
8623 * RETURN : true if camera supports BURST_CAPTURE
8624 * false otherwise
8625 *==========================================================================*/
8626bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8627{
8628 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8629 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8630 const int32_t highResWidth = 3264;
8631 const int32_t highResHeight = 2448;
8632
8633 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8634 // Maximum resolution images cannot be captured at >= 10fps
8635 // -> not supporting BURST_CAPTURE
8636 return false;
8637 }
8638
8639 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8640 // Maximum resolution images can be captured at >= 20fps
8641 // --> supporting BURST_CAPTURE
8642 return true;
8643 }
8644
8645 // Find the smallest highRes resolution, or largest resolution if there is none
8646 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8647 MAX_SIZES_CNT);
8648 size_t highRes = 0;
8649 while ((highRes + 1 < totalCnt) &&
8650 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8651 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8652 highResWidth * highResHeight)) {
8653 highRes++;
8654 }
8655 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8656 return true;
8657 } else {
8658 return false;
8659 }
8660}
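/* Worked example (illustrative): assume picture_min_duration[0] is 66.7ms
 * (about 15 fps at full resolution). That fails the 20 fps fast path but
 * passes the 10 fps bound, so the loop then finds the smallest size that is
 * still at least 3264x2448; if that size can run at, say, 40ms per frame
 * (25 fps, within the 50ms bound), BURST_CAPTURE is reported as supported.
 */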
8661
8662/*===========================================================================
8663 * FUNCTION : initStaticMetadata
8664 *
8665 * DESCRIPTION: initialize the static metadata
8666 *
8667 * PARAMETERS :
8668 * @cameraId : camera Id
8669 *
8670 * RETURN : int32_t type of status
8671 * 0 -- success
8672 * non-zero failure code
8673 *==========================================================================*/
8674int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8675{
8676 int rc = 0;
8677 CameraMetadata staticInfo;
8678 size_t count = 0;
8679 bool limitedDevice = false;
8680 char prop[PROPERTY_VALUE_MAX];
8681 bool supportBurst = false;
8682
8683 supportBurst = supportBurstCapture(cameraId);
8684
8685 /* If the sensor is a YUV sensor (no raw support), or if per-frame control is
8686 * not guaranteed, or if the min fps at max resolution is less than 20 fps,
8687 * it is advertised as a limited device */
8688 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8689 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8690 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8691 !supportBurst;
8692
8693 uint8_t supportedHwLvl = limitedDevice ?
8694 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
8695#ifndef USE_HAL_3_3
8696 // LEVEL_3 - This device will support level 3.
8697 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8698#else
8699 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
8700#endif
8701
8702 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8703 &supportedHwLvl, 1);
8704
8705 bool facingBack = false;
8706 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8707 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8708 facingBack = true;
8709 }
8710 /*HAL 3 only*/
8711 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8712 &gCamCapability[cameraId]->min_focus_distance, 1);
8713
8714 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8715 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8716
8717 /*should be using focal lengths but sensor doesn't provide that info now*/
8718 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8719 &gCamCapability[cameraId]->focal_length,
8720 1);
8721
8722 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8723 gCamCapability[cameraId]->apertures,
8724 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8725
8726 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8727 gCamCapability[cameraId]->filter_densities,
8728 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8729
8730
8731 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8732 size_t mode_count =
8733 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8734 for (size_t i = 0; i < mode_count; i++) {
8735 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8736 }
8737 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8738 available_opt_stab_modes, mode_count);
8739
8740 int32_t lens_shading_map_size[] = {
8741 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8742 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8743 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8744 lens_shading_map_size,
8745 sizeof(lens_shading_map_size)/sizeof(int32_t));
8746
8747 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8748 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8749
8750 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8751 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8752
8753 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8754 &gCamCapability[cameraId]->max_frame_duration, 1);
8755
8756 camera_metadata_rational baseGainFactor = {
8757 gCamCapability[cameraId]->base_gain_factor.numerator,
8758 gCamCapability[cameraId]->base_gain_factor.denominator};
8759 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8760 &baseGainFactor, 1);
8761
8762 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8763 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8764
8765 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8766 gCamCapability[cameraId]->pixel_array_size.height};
8767 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8768 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8769
8770 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8771 gCamCapability[cameraId]->active_array_size.top,
8772 gCamCapability[cameraId]->active_array_size.width,
8773 gCamCapability[cameraId]->active_array_size.height};
8774 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8775 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8776
8777 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8778 &gCamCapability[cameraId]->white_level, 1);
8779
8780 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8781 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8782 gCamCapability[cameraId]->color_arrangement);
8783 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
8784 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
8785
8786#ifndef USE_HAL_3_3
8787 bool hasBlackRegions = false;
8788 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8789 LOGW("black_region_count: %d is bounded to %d",
8790 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8791 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8792 }
8793 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8794 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8795 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8796 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8797 }
8798 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8799 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8800 hasBlackRegions = true;
8801 }
8802#endif
8803 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8804 &gCamCapability[cameraId]->flash_charge_duration, 1);
8805
8806 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8807 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8808
8809 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8810 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8811 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
8812 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8813 &timestampSource, 1);
8814
8815 //update histogram vendor data
8816 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
8817 &gCamCapability[cameraId]->histogram_size, 1);
8818
8819 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
8820 &gCamCapability[cameraId]->max_histogram_count, 1);
8821
8822 int32_t sharpness_map_size[] = {
8823 gCamCapability[cameraId]->sharpness_map_size.width,
8824 gCamCapability[cameraId]->sharpness_map_size.height};
8825
8826 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8827 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8828
8829 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8830 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8831
8832 int32_t scalar_formats[] = {
8833 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8834 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8835 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8836 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8837 HAL_PIXEL_FORMAT_RAW10,
8838 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8839 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8840 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8841 scalar_formats,
8842 scalar_formats_count);
8843
8844 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8845 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8846 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8847 count, MAX_SIZES_CNT, available_processed_sizes);
8848 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8849 available_processed_sizes, count * 2);
8850
8851 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8852 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8853 makeTable(gCamCapability[cameraId]->raw_dim,
8854 count, MAX_SIZES_CNT, available_raw_sizes);
8855 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8856 available_raw_sizes, count * 2);
8857
8858 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8859 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8860 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8861 count, MAX_SIZES_CNT, available_fps_ranges);
8862 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8863 available_fps_ranges, count * 2);
8864
8865 camera_metadata_rational exposureCompensationStep = {
8866 gCamCapability[cameraId]->exp_compensation_step.numerator,
8867 gCamCapability[cameraId]->exp_compensation_step.denominator};
8868 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8869 &exposureCompensationStep, 1);
8870
8871 Vector<uint8_t> availableVstabModes;
8872 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8873 char eis_prop[PROPERTY_VALUE_MAX];
8874 bool eisSupported = false;
8875 memset(eis_prop, 0, sizeof(eis_prop));
8876 property_get("persist.camera.eis.enable", eis_prop, "1");
8877 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8878 count = IS_TYPE_MAX;
8879 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8880 for (size_t i = 0; i < count; i++) {
8881 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8882 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8883 eisSupported = true;
8884 break;
8885 }
8886 }
8887 if (facingBack && eis_prop_set && eisSupported) {
8888 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8889 }
8890 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8891 availableVstabModes.array(), availableVstabModes.size());
8892
8893 /*HAL 1 and HAL 3 common*/
8894 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8895 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8896 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8897 float maxZoom = (float)maxZoomStep / (float)minZoomStep; //avoid integer truncation
8898 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8899 &maxZoom, 1);
8900
8901 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8902 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8903
8904 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8905 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8906 max3aRegions[2] = 0; /* AF not supported */
8907 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8908 max3aRegions, 3);
8909
8910 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8911 memset(prop, 0, sizeof(prop));
8912 property_get("persist.camera.facedetect", prop, "1");
8913 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8914 LOGD("Support face detection mode: %d",
8915 supportedFaceDetectMode);
8916
8917 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
8918 /* support mode should be OFF if max number of faces is 0 */
8919 if (maxFaces <= 0) {
8920 supportedFaceDetectMode = 0;
8921 }
8922 Vector<uint8_t> availableFaceDetectModes;
8923 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8924 if (supportedFaceDetectMode == 1) {
8925 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8926 } else if (supportedFaceDetectMode == 2) {
8927 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8928 } else if (supportedFaceDetectMode == 3) {
8929 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8930 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8931 } else {
8932 maxFaces = 0;
8933 }
8934 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8935 availableFaceDetectModes.array(),
8936 availableFaceDetectModes.size());
8937 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8938 (int32_t *)&maxFaces, 1);
8939 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8940 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8941 &face_bsgc, 1);
8942
8943#ifdef SUPPORT_DEPTH_DATA
8944 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8945 //TODO: Update depth size accordingly, currently we use active array
8946 // as reference.
8947 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8948 int32_t depthHeight =
8949 gCamCapability[cameraId]->active_array_size.height;
8950 //As per spec. depth cloud should be sample count / 16
8951 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8952 assert(0 < depthSamplesCount);
8953 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
8954 &depthSamplesCount, 1);
8955
8956 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8957 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8958 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8959 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
8960
8961 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8962 1, 1 };
8963 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8964 depthMinDuration,
8965 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
8966
8967 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB,
8968 depthSamplesCount, 1, 0 };
8969 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8970 depthStallDuration,
8971 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
8972
8973 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8974 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8975 }
8976#endif
8977
8978 int32_t exposureCompensationRange[] = {
8979 gCamCapability[cameraId]->exposure_compensation_min,
8980 gCamCapability[cameraId]->exposure_compensation_max};
8981 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8982 exposureCompensationRange,
8983 sizeof(exposureCompensationRange)/sizeof(int32_t));
8984
8985 uint8_t lensFacing = (facingBack) ?
8986 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8987 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8988
8989 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8990 available_thumbnail_sizes,
8991 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8992
8993 /*all sizes will be clubbed into this tag*/
8994 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8995 /*android.scaler.availableStreamConfigurations*/
8996 Vector<int32_t> available_stream_configs;
8997 cam_dimension_t active_array_dim;
8998 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8999 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9000
9001 /*advertise list of input dimensions supported based on below property.
9002 By default all sizes up to 5MP will be advertised.
9003 Note that the setprop resolution format should be WxH.
9004 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9005 To list all supported sizes, setprop needs to be set with "0x0" */
9006 cam_dimension_t minInputSize = {2592,1944}; //5MP
9007 memset(prop, 0, sizeof(prop));
9008 property_get("persist.camera.input.minsize", prop, "2592x1944");
9009 if (strlen(prop) > 0) {
9010 char *saveptr = NULL;
9011 char *token = strtok_r(prop, "x", &saveptr);
9012 if (token != NULL) {
9013 minInputSize.width = atoi(token);
9014 }
9015 token = strtok_r(NULL, "x", &saveptr);
9016 if (token != NULL) {
9017 minInputSize.height = atoi(token);
9018 }
9019 }
9020
9021 /* Add input/output stream configurations for each scalar format */
9022 for (size_t j = 0; j < scalar_formats_count; j++) {
9023 switch (scalar_formats[j]) {
9024 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9025 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9026 case HAL_PIXEL_FORMAT_RAW10:
9027 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9028 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9029 addStreamConfig(available_stream_configs, scalar_formats[j],
9030 gCamCapability[cameraId]->raw_dim[i],
9031 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9032 }
9033 break;
9034 case HAL_PIXEL_FORMAT_BLOB:
9035 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9036 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9037 addStreamConfig(available_stream_configs, scalar_formats[j],
9038 gCamCapability[cameraId]->picture_sizes_tbl[i],
9039 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9040 }
9041 break;
9042 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9043 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9044 default:
9045 cam_dimension_t largest_picture_size;
9046 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9047 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9048 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9049 addStreamConfig(available_stream_configs, scalar_formats[j],
9050 gCamCapability[cameraId]->picture_sizes_tbl[i],
9051 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9052 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
9053 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9054 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9055 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9056 >= minInputSize.width) || (gCamCapability[cameraId]->
9057 picture_sizes_tbl[i].height >= minInputSize.height)) {
9058 addStreamConfig(available_stream_configs, scalar_formats[j],
9059 gCamCapability[cameraId]->picture_sizes_tbl[i],
9060 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9061 }
9062 }
9063 }
9064
9065 break;
9066 }
9067 }
9068
9069 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9070 available_stream_configs.array(), available_stream_configs.size());
9071 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9072 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9073
9074 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9075 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9076
9077 /* android.scaler.availableMinFrameDurations */
9078 Vector<int64_t> available_min_durations;
9079 for (size_t j = 0; j < scalar_formats_count; j++) {
9080 switch (scalar_formats[j]) {
9081 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9082 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9083 case HAL_PIXEL_FORMAT_RAW10:
9084 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9085 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9086 available_min_durations.add(scalar_formats[j]);
9087 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9088 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9089 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9090 }
9091 break;
9092 default:
9093 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9094 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9095 available_min_durations.add(scalar_formats[j]);
9096 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9097 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9098 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9099 }
9100 break;
9101 }
9102 }
9103 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9104 available_min_durations.array(), available_min_durations.size());
9105
9106 Vector<int32_t> available_hfr_configs;
9107 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9108 int32_t fps = 0;
9109 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9110 case CAM_HFR_MODE_60FPS:
9111 fps = 60;
9112 break;
9113 case CAM_HFR_MODE_90FPS:
9114 fps = 90;
9115 break;
9116 case CAM_HFR_MODE_120FPS:
9117 fps = 120;
9118 break;
9119 case CAM_HFR_MODE_150FPS:
9120 fps = 150;
9121 break;
9122 case CAM_HFR_MODE_180FPS:
9123 fps = 180;
9124 break;
9125 case CAM_HFR_MODE_210FPS:
9126 fps = 210;
9127 break;
9128 case CAM_HFR_MODE_240FPS:
9129 fps = 240;
9130 break;
9131 case CAM_HFR_MODE_480FPS:
9132 fps = 480;
9133 break;
9134 case CAM_HFR_MODE_OFF:
9135 case CAM_HFR_MODE_MAX:
9136 default:
9137 break;
9138 }
9139
9140 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9141 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9142 /* For each HFR frame rate, need to advertise one variable fps range
9143 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9144 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9145 * set by the app. When video recording is started, [120, 120] is
9146 * set. This way sensor configuration does not change when recording
9147 * is started */
9148
9149 /* (width, height, fps_min, fps_max, batch_size_max) */
9150 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9151 j < MAX_SIZES_CNT; j++) {
9152 available_hfr_configs.add(
9153 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9154 available_hfr_configs.add(
9155 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9156 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9157 available_hfr_configs.add(fps);
9158 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9159
9160 /* (width, height, fps_min, fps_max, batch_size_max) */
9161 available_hfr_configs.add(
9162 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9163 available_hfr_configs.add(
9164 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9165 available_hfr_configs.add(fps);
9166 available_hfr_configs.add(fps);
9167 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9168 }
9169 }
9170 }
9171 //Advertise HFR capability only if the property is set
9172 memset(prop, 0, sizeof(prop));
9173 property_get("persist.camera.hal3hfr.enable", prop, "1");
9174 uint8_t hfrEnable = (uint8_t)atoi(prop);
9175
9176 if(hfrEnable && available_hfr_configs.array()) {
9177 staticInfo.update(
9178 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9179 available_hfr_configs.array(), available_hfr_configs.size());
9180 }
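/* Worked example (illustrative, assuming PREVIEW_FPS_FOR_HFR is 30): a
 * 1920x1080 entry in the 120 fps HFR table is advertised as two 5-tuples,
 * {1920, 1080, 30, 120, 4} for camcorder preview and {1920, 1080, 120, 120, 4}
 * once recording starts, so the sensor configuration does not change when
 * recording begins.
 */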
9181
9182 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9183 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9184 &max_jpeg_size, 1);
9185
9186 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9187 size_t size = 0;
9188 count = CAM_EFFECT_MODE_MAX;
9189 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9190 for (size_t i = 0; i < count; i++) {
9191 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9192 gCamCapability[cameraId]->supported_effects[i]);
9193 if (NAME_NOT_FOUND != val) {
9194 avail_effects[size] = (uint8_t)val;
9195 size++;
9196 }
9197 }
9198 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9199 avail_effects,
9200 size);
9201
9202 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9203 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9204 size_t supported_scene_modes_cnt = 0;
9205 count = CAM_SCENE_MODE_MAX;
9206 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9207 for (size_t i = 0; i < count; i++) {
9208 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9209 CAM_SCENE_MODE_OFF) {
9210 int val = lookupFwkName(SCENE_MODES_MAP,
9211 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9212 gCamCapability[cameraId]->supported_scene_modes[i]);
9213
9214 if (NAME_NOT_FOUND != val) {
9215 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9216 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9217 supported_scene_modes_cnt++;
9218 }
9219 }
9220 }
9221 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9222 avail_scene_modes,
9223 supported_scene_modes_cnt);
9224
9225 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9226 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9227 supported_scene_modes_cnt,
9228 CAM_SCENE_MODE_MAX,
9229 scene_mode_overrides,
9230 supported_indexes,
9231 cameraId);
9232
9233 if (supported_scene_modes_cnt == 0) {
9234 supported_scene_modes_cnt = 1;
9235 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9236 }
9237
9238 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9239 scene_mode_overrides, supported_scene_modes_cnt * 3);
9240
9241 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9242 ANDROID_CONTROL_MODE_AUTO,
9243 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9244 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9245 available_control_modes,
9246 3);
9247
9248 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9249 size = 0;
9250 count = CAM_ANTIBANDING_MODE_MAX;
9251 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9252 for (size_t i = 0; i < count; i++) {
9253 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9254 gCamCapability[cameraId]->supported_antibandings[i]);
9255 if (NAME_NOT_FOUND != val) {
9256 avail_antibanding_modes[size] = (uint8_t)val;
9257 size++;
9258 }
9259
9260 }
9261 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9262 avail_antibanding_modes,
9263 size);
9264
9265 uint8_t avail_abberation_modes[] = {
9266 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9267 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9268 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9269 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9270 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9271 if (0 == count) {
9272 // If no aberration correction modes are available for a device, advertise only the OFF mode
9273 size = 1;
9274 } else {
9275 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9276 // so advertise all 3 modes if at least one mode is supported, as per the
9277 // new M requirement
9278 size = 3;
9279 }
9280 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9281 avail_abberation_modes,
9282 size);
9283
9284 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9285 size = 0;
9286 count = CAM_FOCUS_MODE_MAX;
9287 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9288 for (size_t i = 0; i < count; i++) {
9289 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9290 gCamCapability[cameraId]->supported_focus_modes[i]);
9291 if (NAME_NOT_FOUND != val) {
9292 avail_af_modes[size] = (uint8_t)val;
9293 size++;
9294 }
9295 }
9296 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9297 avail_af_modes,
9298 size);
9299
9300 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9301 size = 0;
9302 count = CAM_WB_MODE_MAX;
9303 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9304 for (size_t i = 0; i < count; i++) {
9305 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9306 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9307 gCamCapability[cameraId]->supported_white_balances[i]);
9308 if (NAME_NOT_FOUND != val) {
9309 avail_awb_modes[size] = (uint8_t)val;
9310 size++;
9311 }
9312 }
9313 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9314 avail_awb_modes,
9315 size);
9316
9317 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9318 count = CAM_FLASH_FIRING_LEVEL_MAX;
9319 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9320 count);
9321 for (size_t i = 0; i < count; i++) {
9322 available_flash_levels[i] =
9323 gCamCapability[cameraId]->supported_firing_levels[i];
9324 }
9325 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9326 available_flash_levels, count);
9327
9328 uint8_t flashAvailable;
9329 if (gCamCapability[cameraId]->flash_available)
9330 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9331 else
9332 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9333 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9334 &flashAvailable, 1);
9335
9336 Vector<uint8_t> avail_ae_modes;
9337 count = CAM_AE_MODE_MAX;
9338 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9339 for (size_t i = 0; i < count; i++) {
9340 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9341 }
9342 if (flashAvailable) {
9343 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9344 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9345 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
9346 }
9347 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9348 avail_ae_modes.array(),
9349 avail_ae_modes.size());
9350
9351 int32_t sensitivity_range[2];
9352 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9353 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9354 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9355 sensitivity_range,
9356 sizeof(sensitivity_range) / sizeof(int32_t));
9357
9358 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9359 &gCamCapability[cameraId]->max_analog_sensitivity,
9360 1);
9361
9362 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9363 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9364 &sensor_orientation,
9365 1);
9366
9367 int32_t max_output_streams[] = {
9368 MAX_STALLING_STREAMS,
9369 MAX_PROCESSED_STREAMS,
9370 MAX_RAW_STREAMS};
9371 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9372 max_output_streams,
9373 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9374
9375 uint8_t avail_leds = 0;
9376 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9377 &avail_leds, 0);
9378
9379 uint8_t focus_dist_calibrated;
9380 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9381 gCamCapability[cameraId]->focus_dist_calibrated);
9382 if (NAME_NOT_FOUND != val) {
9383 focus_dist_calibrated = (uint8_t)val;
9384 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9385 &focus_dist_calibrated, 1);
9386 }
9387
9388 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9389 size = 0;
9390 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9391 MAX_TEST_PATTERN_CNT);
9392 for (size_t i = 0; i < count; i++) {
9393 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9394 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9395 if (NAME_NOT_FOUND != testpatternMode) {
9396 avail_testpattern_modes[size] = testpatternMode;
9397 size++;
9398 }
9399 }
9400 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9401 avail_testpattern_modes,
9402 size);
9403
9404 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9405 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9406 &max_pipeline_depth,
9407 1);
9408
9409 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9410 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9411 &partial_result_count,
9412 1);
9413
9414 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9415 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9416
9417 Vector<uint8_t> available_capabilities;
9418 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9419 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9420 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9421 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9422 if (supportBurst) {
9423 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9424 }
9425 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9426 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9427 if (hfrEnable && available_hfr_configs.array()) {
9428 available_capabilities.add(
9429 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9430 }
9431
9432 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9433 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9434 }
9435 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9436 available_capabilities.array(),
9437 available_capabilities.size());
9438
9439 // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9440 // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9441 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9442 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9443
9444 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9445 &aeLockAvailable, 1);
9446
9447 // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9448 // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9449 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9450 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9451
9452 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9453 &awbLockAvailable, 1);
9454
9455 int32_t max_input_streams = 1;
9456 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9457 &max_input_streams,
9458 1);
9459
9460 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
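    // e.g. the first entry below reads: an IMPLEMENTATION_DEFINED input can be
    // reprocessed into BLOB or YCbCr_420_888 outputs.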
9461 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9462 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9463 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9464 HAL_PIXEL_FORMAT_YCbCr_420_888};
9465 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9466 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9467
9468 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9469 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9470 &max_latency,
9471 1);
9472
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009473#ifndef USE_HAL_3_3
9474 int32_t isp_sensitivity_range[2];
9475 isp_sensitivity_range[0] =
9476 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9477 isp_sensitivity_range[1] =
9478 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9479 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9480 isp_sensitivity_range,
9481 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9482#endif
9483
Thierry Strudel3d639192016-09-09 11:52:26 -07009484 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9485 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9486 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9487 available_hot_pixel_modes,
9488 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9489
9490 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9491 ANDROID_SHADING_MODE_FAST,
9492 ANDROID_SHADING_MODE_HIGH_QUALITY};
9493 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9494 available_shading_modes,
9495 3);
9496
9497 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9498 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9499 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9500 available_lens_shading_map_modes,
9501 2);
9502
9503 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9504 ANDROID_EDGE_MODE_FAST,
9505 ANDROID_EDGE_MODE_HIGH_QUALITY,
9506 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9507 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9508 available_edge_modes,
9509 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9510
9511 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9512 ANDROID_NOISE_REDUCTION_MODE_FAST,
9513 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9514 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9515 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9516 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9517 available_noise_red_modes,
9518 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9519
9520 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9521 ANDROID_TONEMAP_MODE_FAST,
9522 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9523 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9524 available_tonemap_modes,
9525 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9526
9527 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9528 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9529 available_hot_pixel_map_modes,
9530 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9531
9532 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9533 gCamCapability[cameraId]->reference_illuminant1);
9534 if (NAME_NOT_FOUND != val) {
9535 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9536 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9537 }
9538
9539 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9540 gCamCapability[cameraId]->reference_illuminant2);
9541 if (NAME_NOT_FOUND != val) {
9542 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9543 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9544 }
9545
9546 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9547 (void *)gCamCapability[cameraId]->forward_matrix1,
9548 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9549
9550 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9551 (void *)gCamCapability[cameraId]->forward_matrix2,
9552 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9553
9554 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9555 (void *)gCamCapability[cameraId]->color_transform1,
9556 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9557
9558 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9559 (void *)gCamCapability[cameraId]->color_transform2,
9560 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9561
9562 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9563 (void *)gCamCapability[cameraId]->calibration_transform1,
9564 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9565
9566 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9567 (void *)gCamCapability[cameraId]->calibration_transform2,
9568 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9569
9570 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9571 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9572 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9573 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9574 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9575 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9576 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9577 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9578 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9579 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9580 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9581 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9582 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9583 ANDROID_JPEG_GPS_COORDINATES,
9584 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9585 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9586 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9587 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9588 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9589 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9590 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9591 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9592 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9593 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009594#ifndef USE_HAL_3_3
9595 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009597 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009598 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009599 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9600 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009601 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009602 /* DevCamDebug metadata request_keys_basic */
9603 DEVCAMDEBUG_META_ENABLE,
9604 /* DevCamDebug metadata end */
9605 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009606
9607 size_t request_keys_cnt =
9608 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9609 Vector<int32_t> available_request_keys;
9610 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9611 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9612 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9613 }
9614
9615 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9616 available_request_keys.array(), available_request_keys.size());
9617
9618 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9619 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9620 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9621 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9622 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9623 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9624 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9625 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9626 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9627 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9628 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9629 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9630 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9631 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9632 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9633 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9634 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009635 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009636 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9637 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9638 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009639 ANDROID_STATISTICS_FACE_SCORES,
9640#ifndef USE_HAL_3_3
9641 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9642#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009643 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009644 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009645 // DevCamDebug metadata result_keys_basic
9646 DEVCAMDEBUG_META_ENABLE,
9647 // DevCamDebug metadata result_keys AF
9648 DEVCAMDEBUG_AF_LENS_POSITION,
9649 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9650 DEVCAMDEBUG_AF_TOF_DISTANCE,
9651 DEVCAMDEBUG_AF_LUMA,
9652 DEVCAMDEBUG_AF_HAF_STATE,
9653 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9654 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9655 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9656 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9657 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9658 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9659 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9660 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9661 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9662 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9663 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9664 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9665 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9666 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9667 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9668 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9669 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9670 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9671 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9672 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9673 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9674 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9675 // DevCamDebug metadata result_keys AEC
9676 DEVCAMDEBUG_AEC_TARGET_LUMA,
9677 DEVCAMDEBUG_AEC_COMP_LUMA,
9678 DEVCAMDEBUG_AEC_AVG_LUMA,
9679 DEVCAMDEBUG_AEC_CUR_LUMA,
9680 DEVCAMDEBUG_AEC_LINECOUNT,
9681 DEVCAMDEBUG_AEC_REAL_GAIN,
9682 DEVCAMDEBUG_AEC_EXP_INDEX,
9683 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009684 // DevCamDebug metadata result_keys zzHDR
9685 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9686 DEVCAMDEBUG_AEC_L_LINECOUNT,
9687 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9688 DEVCAMDEBUG_AEC_S_LINECOUNT,
9689 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9690 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9691 // DevCamDebug metadata result_keys ADRC
9692 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9693 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9694 DEVCAMDEBUG_AEC_GTM_RATIO,
9695 DEVCAMDEBUG_AEC_LTM_RATIO,
9696 DEVCAMDEBUG_AEC_LA_RATIO,
9697 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009698 // DevCamDebug metadata result_keys AWB
9699 DEVCAMDEBUG_AWB_R_GAIN,
9700 DEVCAMDEBUG_AWB_G_GAIN,
9701 DEVCAMDEBUG_AWB_B_GAIN,
9702 DEVCAMDEBUG_AWB_CCT,
9703 DEVCAMDEBUG_AWB_DECISION,
9704 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009705 };
9706
Thierry Strudel3d639192016-09-09 11:52:26 -07009707 size_t result_keys_cnt =
9708 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9709
9710 Vector<int32_t> available_result_keys;
9711 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9712 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9713 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9714 }
9715 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9716 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9717 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9718 }
9719 if (supportedFaceDetectMode == 1) {
9720 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9721 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9722 } else if ((supportedFaceDetectMode == 2) ||
9723 (supportedFaceDetectMode == 3)) {
9724 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9725 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9726 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009727#ifndef USE_HAL_3_3
9728 if (hasBlackRegions) {
9729 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9730 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9731 }
9732#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009733 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9734 available_result_keys.array(), available_result_keys.size());
9735
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009736 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009737 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9738 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9739 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9740 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9741 ANDROID_SCALER_CROPPING_TYPE,
9742 ANDROID_SYNC_MAX_LATENCY,
9743 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9744 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9745 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9746 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9747 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9748 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9749 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9750 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9751 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9752 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9753 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9754 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9755 ANDROID_LENS_FACING,
9756 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9757 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9758 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9759 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9760 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9761 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9762 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9763 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9764 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9765 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9766 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9767 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9768 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9769 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9770 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9771 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9772 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9773 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9774 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9775 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009776 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009777 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9778 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9779 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9780 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9781 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9782 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9783 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9784 ANDROID_CONTROL_AVAILABLE_MODES,
9785 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9786 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9787 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9788 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009789 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009790#ifdef SUPPORT_DEPTH_DATA
9791 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9792 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9793 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9794 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9795 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9796#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009797#ifndef USE_HAL_3_3
9798 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9799 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9800#endif
9801 };
9802
9803 Vector<int32_t> available_characteristics_keys;
9804 available_characteristics_keys.appendArray(characteristics_keys_basic,
9805 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9806#ifndef USE_HAL_3_3
9807 if (hasBlackRegions) {
9808 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9809 }
9810#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009811 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009812 available_characteristics_keys.array(),
9813 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009814
9815 /* Available stall durations depend on the HW + SW and will differ across devices */
9816 /* TODO: entries for RAW still have to be added after implementation */
9817 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9818 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9819
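    // Each entry appended below is a 4-tuple: (format, width, height, stall duration in ns).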
9820 Vector<int64_t> available_stall_durations;
9821 for (uint32_t j = 0; j < stall_formats_count; j++) {
9822 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9823 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9824 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9825 available_stall_durations.add(stall_formats[j]);
9826 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9827 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9828 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9829 }
9830 } else {
9831 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9832 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9833 available_stall_durations.add(stall_formats[j]);
9834 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9835 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9836 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9837 }
9838 }
9839 }
9840 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9841 available_stall_durations.array(),
9842 available_stall_durations.size());
9843
9844 //QCAMERA3_OPAQUE_RAW
9845 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9846 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9847 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9848 case LEGACY_RAW:
9849 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9850 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9851 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9852 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9853 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9854 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9855 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9856 break;
9857 case MIPI_RAW:
9858 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9859 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9860 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9861 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9862 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9863 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9864 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9865 break;
9866 default:
9867 LOGE("unknown opaque_raw_format %d",
9868 gCamCapability[cameraId]->opaque_raw_fmt);
9869 break;
9870 }
9871 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9872
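    // QCAMERA3_OPAQUE_RAW_STRIDES is populated as (width, height, stride) triplets,
    // one per supported RAW dimension.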
9873 Vector<int32_t> strides;
9874 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9875 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9876 cam_stream_buf_plane_info_t buf_planes;
9877 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9878 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9879 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9880 &gCamCapability[cameraId]->padding_info, &buf_planes);
9881 strides.add(buf_planes.plane_info.mp[0].stride);
9882 }
9883 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9884 strides.size());
9885
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009886 //TBD: remove the following line once backend advertises zzHDR in feature mask
9887 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009888 //Video HDR default
9889 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9890 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009891 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009892 int32_t vhdr_mode[] = {
9893 QCAMERA3_VIDEO_HDR_MODE_OFF,
9894 QCAMERA3_VIDEO_HDR_MODE_ON};
9895
9896 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9897 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9898 vhdr_mode, vhdr_mode_count);
9899 }
9900
Thierry Strudel3d639192016-09-09 11:52:26 -07009901 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9902 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9903 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9904
9905 uint8_t isMonoOnly =
9906 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9907 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9908 &isMonoOnly, 1);
9909
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009910#ifndef USE_HAL_3_3
9911 Vector<int32_t> opaque_size;
9912 for (size_t j = 0; j < scalar_formats_count; j++) {
9913 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9914 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9915 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9916 cam_stream_buf_plane_info_t buf_planes;
9917
9918 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9919 &gCamCapability[cameraId]->padding_info, &buf_planes);
9920
9921 if (rc == 0) {
9922 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9923 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9924 opaque_size.add(buf_planes.plane_info.frame_len);
9925 }else {
9926 LOGE("raw frame calculation failed!");
9927 }
9928 }
9929 }
9930 }
9931
9932 if ((opaque_size.size() > 0) &&
9933 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9934 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9935 else
9936 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
9937#endif
9938
Thierry Strudel04e026f2016-10-10 11:27:36 -07009939 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9940 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9941 size = 0;
9942 count = CAM_IR_MODE_MAX;
9943 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9944 for (size_t i = 0; i < count; i++) {
9945 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9946 gCamCapability[cameraId]->supported_ir_modes[i]);
9947 if (NAME_NOT_FOUND != val) {
9948 avail_ir_modes[size] = (int32_t)val;
9949 size++;
9950 }
9951 }
9952 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9953 avail_ir_modes, size);
9954 }
9955
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009956 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9957 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9958 size = 0;
9959 count = CAM_AEC_CONVERGENCE_MAX;
9960 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9961 for (size_t i = 0; i < count; i++) {
9962 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9963 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9964 if (NAME_NOT_FOUND != val) {
9965 available_instant_aec_modes[size] = (int32_t)val;
9966 size++;
9967 }
9968 }
9969 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9970 available_instant_aec_modes, size);
9971 }
9972
Thierry Strudel54dc9782017-02-15 12:12:10 -08009973 int32_t sharpness_range[] = {
9974 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9975 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9976 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9977
9978 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9979 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9980 size = 0;
9981 count = CAM_BINNING_CORRECTION_MODE_MAX;
9982 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9983 for (size_t i = 0; i < count; i++) {
9984 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9985 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9986 gCamCapability[cameraId]->supported_binning_modes[i]);
9987 if (NAME_NOT_FOUND != val) {
9988 avail_binning_modes[size] = (int32_t)val;
9989 size++;
9990 }
9991 }
9992 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9993 avail_binning_modes, size);
9994 }
9995
9996 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9997 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9998 size = 0;
9999 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10000 for (size_t i = 0; i < count; i++) {
10001 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10002 gCamCapability[cameraId]->supported_aec_modes[i]);
10003 if (NAME_NOT_FOUND != val)
10004 available_aec_modes[size++] = val;
10005 }
10006 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10007 available_aec_modes, size);
10008 }
10009
10010 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10011 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10012 size = 0;
10013 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10014 for (size_t i = 0; i < count; i++) {
10015 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10016 gCamCapability[cameraId]->supported_iso_modes[i]);
10017 if (NAME_NOT_FOUND != val)
10018 available_iso_modes[size++] = val;
10019 }
10020 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10021 available_iso_modes, size);
10022 }
10023
10024 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10025 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10026 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10027 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10028 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10029
10030 int32_t available_saturation_range[4];
10031 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10032 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10033 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10034 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10035 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10036 available_saturation_range, 4);
10037
10038 uint8_t is_hdr_values[2];
10039 is_hdr_values[0] = 0;
10040 is_hdr_values[1] = 1;
10041 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10042 is_hdr_values, 2);
10043
10044 float is_hdr_confidence_range[2];
10045 is_hdr_confidence_range[0] = 0.0;
10046 is_hdr_confidence_range[1] = 1.0;
10047 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10048 is_hdr_confidence_range, 2);
10049
Thierry Strudel3d639192016-09-09 11:52:26 -070010050 gStaticMetadata[cameraId] = staticInfo.release();
10051 return rc;
10052}
10053
10054/*===========================================================================
10055 * FUNCTION : makeTable
10056 *
10057 * DESCRIPTION: make a table of sizes
10058 *
10059 * PARAMETERS :
10060 *   @dimTable, @size      : source dimension table and its entry count
10061 *   @max_size, @sizeTable : max entries to copy, and output array filled as [w0, h0, w1, h1, ...]
10062 *==========================================================================*/
10063void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10064 size_t max_size, int32_t *sizeTable)
10065{
10066 size_t j = 0;
10067 if (size > max_size) {
10068 size = max_size;
10069 }
10070 for (size_t i = 0; i < size; i++) {
10071 sizeTable[j] = dimTable[i].width;
10072 sizeTable[j+1] = dimTable[i].height;
10073 j+=2;
10074 }
10075}
10076
10077/*===========================================================================
10078 * FUNCTION : makeFPSTable
10079 *
10080 * DESCRIPTION: make a table of fps ranges
10081 *
10082 * PARAMETERS :
10083 *   @fpsTable, @size, @max_size, @fpsRangesTable : source fps ranges, entry counts, and output array filled as [min0, max0, min1, max1, ...]
10084 *==========================================================================*/
10085void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10086 size_t max_size, int32_t *fpsRangesTable)
10087{
10088 size_t j = 0;
10089 if (size > max_size) {
10090 size = max_size;
10091 }
10092 for (size_t i = 0; i < size; i++) {
10093 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10094 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10095 j+=2;
10096 }
10097}
10098
10099/*===========================================================================
10100 * FUNCTION : makeOverridesList
10101 *
10102 * DESCRIPTION: make a list of scene mode overrides
10103 *
10104 * PARAMETERS :
10105 *   @overridesTable, @size, @max_size : per-scene-mode overrides from the daemon and entry counts
10106 *   @overridesList, @supported_indexes, @camera_id : output list (one AE/AWB/AF triplet per supported scene mode), supported scene mode indexes, and camera id
10107 *==========================================================================*/
10108void QCamera3HardwareInterface::makeOverridesList(
10109 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10110 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10111{
10112 /* The daemon gives a list of overrides for all scene modes.
10113 However, we should send the framework only the overrides for the scene modes
10114 it supports. */
10115 size_t j = 0;
10116 if (size > max_size) {
10117 size = max_size;
10118 }
10119 size_t focus_count = CAM_FOCUS_MODE_MAX;
10120 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10121 focus_count);
10122 for (size_t i = 0; i < size; i++) {
10123 bool supt = false;
10124 size_t index = supported_indexes[i];
10125 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10126 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10127 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10128 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10129 overridesTable[index].awb_mode);
10130 if (NAME_NOT_FOUND != val) {
10131 overridesList[j+1] = (uint8_t)val;
10132 }
10133 uint8_t focus_override = overridesTable[index].af_mode;
10134 for (size_t k = 0; k < focus_count; k++) {
10135 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10136 supt = true;
10137 break;
10138 }
10139 }
10140 if (supt) {
10141 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10142 focus_override);
10143 if (NAME_NOT_FOUND != val) {
10144 overridesList[j+2] = (uint8_t)val;
10145 }
10146 } else {
10147 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10148 }
10149 j+=3;
10150 }
10151}
10152
10153/*===========================================================================
10154 * FUNCTION : filterJpegSizes
10155 *
10156 * DESCRIPTION: Returns the supported JPEG sizes, keeping only processed sizes that are
10157 *              at least the active array size divided by the downscale factor
10158 *
10159 * PARAMETERS :
10160 *   @jpegSizes : output array; @processedSizes, @processedSizesCnt : candidate sizes; @maxCount : output capacity; @active_array_size, @downscale_factor : define the minimum acceptable size
10161 * RETURN : length of jpegSizes array
10162 *==========================================================================*/
10163
10164size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10165 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10166 uint8_t downscale_factor)
10167{
10168 if (0 == downscale_factor) {
10169 downscale_factor = 1;
10170 }
10171
10172 int32_t min_width = active_array_size.width / downscale_factor;
10173 int32_t min_height = active_array_size.height / downscale_factor;
10174 size_t jpegSizesCnt = 0;
10175 if (processedSizesCnt > maxCount) {
10176 processedSizesCnt = maxCount;
10177 }
10178 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10179 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10180 jpegSizes[jpegSizesCnt] = processedSizes[i];
10181 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10182 jpegSizesCnt += 2;
10183 }
10184 }
10185 return jpegSizesCnt;
10186}
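// Illustrative example (hypothetical values): with an active array of 4000x3000 and a
// downscale factor of 4, only processed sizes of at least 1000x750 are copied into jpegSizes.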
10187
10188/*===========================================================================
10189 * FUNCTION : computeNoiseModelEntryS
10190 *
10191 * DESCRIPTION: function to map a given sensitivity to the S noise
10192 * model parameters in the DNG noise model.
10193 *
10194 * PARAMETERS : sens : the sensor sensitivity
10195 *
10196 * RETURN : S (sensor amplification) noise
10197 *
10198 *==========================================================================*/
10199double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10200 double s = gCamCapability[mCameraId]->gradient_S * sens +
10201 gCamCapability[mCameraId]->offset_S;
10202 return ((s < 0.0) ? 0.0 : s);
10203}
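// Illustrative example with hypothetical calibration values: gradient_S = 4.0e-06 and
// offset_S = 1.0e-04 give S = 4.0e-06 * 800 + 1.0e-04 = 3.3e-03 for sens = 800.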
10204
10205/*===========================================================================
10206 * FUNCTION : computeNoiseModelEntryO
10207 *
10208 * DESCRIPTION: function to map a given sensitivity to the O noise
10209 * model parameters in the DNG noise model.
10210 *
10211 * PARAMETERS : sens : the sensor sensitivity
10212 *
10213 * RETURN : O (sensor readout) noise
10214 *
10215 *==========================================================================*/
10216double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10217 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10218 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10219 1.0 : (1.0 * sens / max_analog_sens);
10220 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10221 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10222 return ((o < 0.0) ? 0.0 : o);
10223}
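// Illustrative example with hypothetical calibration values: with gradient_O = 1.0e-11,
// offset_O = 1.0e-06 and max_analog_sensitivity = 800, sens = 1600 gives digital_gain = 2.0
// and O = 1.0e-11 * 1600^2 + 1.0e-06 * 2.0^2 = 2.56e-05 + 4.0e-06 = 2.96e-05.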
10224
10225/*===========================================================================
10226 * FUNCTION : getSensorSensitivity
10227 *
10228 * DESCRIPTION: convert iso_mode to an integer value
10229 *
10230 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10231 *
10232 * RETURN : sensitivity supported by sensor
10233 *
10234 *==========================================================================*/
10235int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10236{
10237 int32_t sensitivity;
10238
10239 switch (iso_mode) {
10240 case CAM_ISO_MODE_100:
10241 sensitivity = 100;
10242 break;
10243 case CAM_ISO_MODE_200:
10244 sensitivity = 200;
10245 break;
10246 case CAM_ISO_MODE_400:
10247 sensitivity = 400;
10248 break;
10249 case CAM_ISO_MODE_800:
10250 sensitivity = 800;
10251 break;
10252 case CAM_ISO_MODE_1600:
10253 sensitivity = 1600;
10254 break;
10255 default:
10256 sensitivity = -1;
10257 break;
10258 }
10259 return sensitivity;
10260}
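// e.g. getSensorSensitivity(CAM_ISO_MODE_400) returns 400; any iso_mode not handled
// above returns -1.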
10261
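/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: Create the global HDR+ client if it does not exist yet. If Easel
 *              is present, power it on and immediately suspend it; if Easel is
 *              not present, destroy the client. Expected to be called with
 *              gCamLock held (see getCamInfo).
 *
 * RETURN     : OK on success, error status otherwise
 *==========================================================================*/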
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010262int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10263 if (gHdrPlusClient != nullptr) {
10264 return OK;
10265 }
10266
10267 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10268 if (gHdrPlusClient->isEaselPresentOnDevice()) {
10269 // If Easel is present, power on Easel and suspend it immediately.
10270 status_t res = gHdrPlusClient->powerOnEasel();
10271 if (res != OK) {
10272 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10273 gHdrPlusClient = nullptr;
10274 return res;
10275 }
10276
10277 res = gHdrPlusClient->suspendEasel();
10278 if (res != OK) {
10279 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10280 }
10281
10282 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10283 } else {
10284 // Destroy HDR+ client if Easel isn't present.
10285 gHdrPlusClient = nullptr;
10286 }
10287
10288 return OK;
10289}
10290
Thierry Strudel3d639192016-09-09 11:52:26 -070010291/*===========================================================================
10292 * FUNCTION : getCamInfo
10293 *
10294 * DESCRIPTION: query camera capabilities
10295 *
10296 * PARAMETERS :
10297 * @cameraId : camera Id
10298 * @info : camera info struct to be filled in with camera capabilities
10299 *
10300 * RETURN : int type of status
10301 * NO_ERROR -- success
10302 * non-zero failure code
10303 *==========================================================================*/
10304int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10305 struct camera_info *info)
10306{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010307 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 int rc = 0;
10309
10310 pthread_mutex_lock(&gCamLock);
10311 if (NULL == gCamCapability[cameraId]) {
10312 rc = initCapabilities(cameraId);
10313 if (rc < 0) {
10314 pthread_mutex_unlock(&gCamLock);
10315 return rc;
10316 }
10317 }
10318
10319 if (NULL == gStaticMetadata[cameraId]) {
10320 rc = initStaticMetadata(cameraId);
10321 if (rc < 0) {
10322 pthread_mutex_unlock(&gCamLock);
10323 return rc;
10324 }
10325 }
10326
10327 switch(gCamCapability[cameraId]->position) {
10328 case CAM_POSITION_BACK:
10329 case CAM_POSITION_BACK_AUX:
10330 info->facing = CAMERA_FACING_BACK;
10331 break;
10332
10333 case CAM_POSITION_FRONT:
10334 case CAM_POSITION_FRONT_AUX:
10335 info->facing = CAMERA_FACING_FRONT;
10336 break;
10337
10338 default:
10339 LOGE("Unknown position type %d for camera id:%d",
10340 gCamCapability[cameraId]->position, cameraId);
10341 rc = -1;
10342 break;
10343 }
10344
10345
10346 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010347#ifndef USE_HAL_3_3
10348 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10349#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010350 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010351#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010352 info->static_camera_characteristics = gStaticMetadata[cameraId];
10353
10354 //For now assume both cameras can operate independently.
10355 info->conflicting_devices = NULL;
10356 info->conflicting_devices_length = 0;
10357
10358 //resource cost is 100 * MIN(1.0, m/M),
10359 //where m is throughput requirement with maximum stream configuration
10360 //and M is CPP maximum throughput.
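    // Hypothetical example: 3 processed streams at 4000x3000 with max_fps = 30 against a
    // CPP bandwidth of 1.2e9 pixels/s gives ratio = 3*4000*3000*30 / 1.2e9 = 0.9 -> cost 90.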
10361 float max_fps = 0.0;
10362 for (uint32_t i = 0;
10363 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10364 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10365 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10366 }
10367 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10368 gCamCapability[cameraId]->active_array_size.width *
10369 gCamCapability[cameraId]->active_array_size.height * max_fps /
10370 gCamCapability[cameraId]->max_pixel_bandwidth;
10371 info->resource_cost = 100 * MIN(1.0, ratio);
10372 LOGI("camera %d resource cost is %d", cameraId,
10373 info->resource_cost);
10374
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010375 rc = initHdrPlusClientLocked();
10376 if (rc != OK) {
10377 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10378 pthread_mutex_unlock(&gCamLock);
10379 return rc;
10380 }
10381
Thierry Strudel3d639192016-09-09 11:52:26 -070010382 pthread_mutex_unlock(&gCamLock);
10383 return rc;
10384}
10385
10386/*===========================================================================
10387 * FUNCTION : translateCapabilityToMetadata
10388 *
10389 * DESCRIPTION: translate the capability into camera_metadata_t
10390 *
10391 * PARAMETERS : type of the request
10392 *
10393 *
10394 * RETURN : success: camera_metadata_t*
10395 * failure: NULL
10396 *
10397 *==========================================================================*/
10398camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10399{
10400 if (mDefaultMetadata[type] != NULL) {
10401 return mDefaultMetadata[type];
10402 }
10403 //first time we are handling this request
10404 //fill up the metadata structure using the wrapper class
10405 CameraMetadata settings;
10406 //translate from cam_capability_t to camera_metadata_tag_t
10407 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10408 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10409 int32_t defaultRequestID = 0;
10410 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10411
10412 /* OIS disable */
10413 char ois_prop[PROPERTY_VALUE_MAX];
10414 memset(ois_prop, 0, sizeof(ois_prop));
10415 property_get("persist.camera.ois.disable", ois_prop, "0");
10416 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10417
10418 /* Force video to use OIS */
10419 char videoOisProp[PROPERTY_VALUE_MAX];
10420 memset(videoOisProp, 0, sizeof(videoOisProp));
10421 property_get("persist.camera.ois.video", videoOisProp, "1");
10422 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010423
10424 // Hybrid AE enable/disable
10425 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10426 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10427 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10428 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10429
Thierry Strudel3d639192016-09-09 11:52:26 -070010430 uint8_t controlIntent = 0;
10431 uint8_t focusMode;
10432 uint8_t vsMode;
10433 uint8_t optStabMode;
10434 uint8_t cacMode;
10435 uint8_t edge_mode;
10436 uint8_t noise_red_mode;
10437 uint8_t tonemap_mode;
10438 bool highQualityModeEntryAvailable = FALSE;
10439 bool fastModeEntryAvailable = FALSE;
10440 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10441 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010442 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010443
Thierry Strudel3d639192016-09-09 11:52:26 -070010444 switch (type) {
10445 case CAMERA3_TEMPLATE_PREVIEW:
10446 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10447 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10448 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10449 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10450 edge_mode = ANDROID_EDGE_MODE_FAST;
10451 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10452 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10453 break;
10454 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10455 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10456 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10457 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10458 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10459 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10460 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10461 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10462 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10463 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10464 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10465 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10466 highQualityModeEntryAvailable = TRUE;
10467 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10468 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10469 fastModeEntryAvailable = TRUE;
10470 }
10471 }
10472 if (highQualityModeEntryAvailable) {
10473 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10474 } else if (fastModeEntryAvailable) {
10475 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10476 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010477 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10478 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10479 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010480 break;
10481 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10482 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10483 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10484 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010485 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10486 edge_mode = ANDROID_EDGE_MODE_FAST;
10487 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10488 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10489 if (forceVideoOis)
10490 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10491 break;
10492 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10493 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10494 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10495 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010496 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10497 edge_mode = ANDROID_EDGE_MODE_FAST;
10498 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10499 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10500 if (forceVideoOis)
10501 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10502 break;
10503 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10504 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10505 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10506 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10507 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10508 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10509 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10510 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10511 break;
10512 case CAMERA3_TEMPLATE_MANUAL:
10513 edge_mode = ANDROID_EDGE_MODE_FAST;
10514 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10515 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10516 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10517 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10518 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10519 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10520 break;
10521 default:
10522 edge_mode = ANDROID_EDGE_MODE_FAST;
10523 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10524 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10525 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10526 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10527 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10528 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10529 break;
10530 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010531 // Set CAC to OFF if the underlying device doesn't support it
10532 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10533 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10534 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010535 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10536 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10537 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10538 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10539 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10540 }
10541 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
10542
10543 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10544 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10545 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10546 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10547 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10548 || ois_disable)
10549 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10550 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010551 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010552
10553 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10554 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10555
10556 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10557 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10558
10559 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10560 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10561
10562 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10563 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10564
10565 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10566 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10567
10568 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10569 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10570
10571 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10572 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10573
10574 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10575 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10576
10577 /*flash*/
10578 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10579 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10580
10581 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10582 settings.update(ANDROID_FLASH_FIRING_POWER,
10583 &flashFiringLevel, 1);
10584
10585 /* lens */
10586 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10587 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10588
10589 if (gCamCapability[mCameraId]->filter_densities_count) {
10590 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10591 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10592 gCamCapability[mCameraId]->filter_densities_count);
10593 }
10594
10595 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10596 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10597
Thierry Strudel3d639192016-09-09 11:52:26 -070010598 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10599 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10600
10601 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10602 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10603
10604 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10605 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10606
10607 /* face detection (default to OFF) */
10608 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10609 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10610
Thierry Strudel54dc9782017-02-15 12:12:10 -080010611 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10612 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010613
10614 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10615 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10616
10617 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10618 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10619
Thierry Strudel3d639192016-09-09 11:52:26 -070010620
10621 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10622 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10623
10624 /* Exposure time (default to the minimum supported exposure time) */
10625 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10626 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10627
10628 /* frame duration */
10629 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10630 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10631
10632 /* sensitivity */
10633 static const int32_t default_sensitivity = 100;
10634 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010635#ifndef USE_HAL_3_3
10636 static const int32_t default_isp_sensitivity =
10637 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10638 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10639#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010640
10641 /*edge mode*/
10642 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10643
10644 /*noise reduction mode*/
10645 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10646
10647 /*color correction mode*/
10648 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10649 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10650
10651 /*tonemap mode*/
10652 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10653
10654 int32_t scaler_crop_region[4];
10655 scaler_crop_region[0] = 0;
10656 scaler_crop_region[1] = 0;
10657 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10658 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10659 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10660
10661 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10662 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10663
10664 /*focus distance*/
10665 float focus_distance = 0.0;
10666 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10667
10668 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010669 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010670 float max_range = 0.0;
10671 float max_fixed_fps = 0.0;
10672 int32_t fps_range[2] = {0, 0};
10673 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10674 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010675 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10676 TEMPLATE_MAX_PREVIEW_FPS) {
10677 continue;
10678 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010679 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10680 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10681 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10682 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10683 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10684 if (range > max_range) {
10685 fps_range[0] =
10686 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10687 fps_range[1] =
10688 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10689 max_range = range;
10690 }
10691 } else {
10692 if (range < 0.01 && max_fixed_fps <
10693 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10694 fps_range[0] =
10695 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10696 fps_range[1] =
10697 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10698 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10699 }
10700 }
10701 }
10702 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
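    /* Worked example (illustrative only; the fps table values are assumptions, not from any
     * real sensor): with fps_ranges_tbl = {[15,30], [30,30], [7.5,30]} and every max_fps within
     * TEMPLATE_MAX_PREVIEW_FPS, the loop above picks
     *   - preview/still/ZSL templates: [7.5,30] (widest range), stored as {7, 30} after the
     *     int32_t cast;
     *   - all other templates:         [30,30]  (highest fixed rate), stored as {30, 30}.
     */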
10703
10704 /*precapture trigger*/
10705 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10706 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10707
10708 /*af trigger*/
10709 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10710 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10711
10712 /* ae & af regions */
10713 int32_t active_region[] = {
10714 gCamCapability[mCameraId]->active_array_size.left,
10715 gCamCapability[mCameraId]->active_array_size.top,
10716 gCamCapability[mCameraId]->active_array_size.left +
10717 gCamCapability[mCameraId]->active_array_size.width,
10718 gCamCapability[mCameraId]->active_array_size.top +
10719 gCamCapability[mCameraId]->active_array_size.height,
10720 0};
10721 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10722 sizeof(active_region) / sizeof(active_region[0]));
10723 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10724 sizeof(active_region) / sizeof(active_region[0]));
10725
10726 /* black level lock */
10727 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10728 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10729
Thierry Strudel3d639192016-09-09 11:52:26 -070010730 //special defaults for manual template
10731 if (type == CAMERA3_TEMPLATE_MANUAL) {
10732 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10733 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10734
10735 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10736 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10737
10738 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10739 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10740
10741 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10742 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10743
10744 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10745 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10746
10747 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10748 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10749 }
10750
10751
10752 /* TNR
10753 * This is where we decide for which templates TNR will be enabled.
10754 * TNR is turned on if either the preview or the video stream requires it.
10755 * This is not to be confused with per-stream linking; that decision
10756 * is still made per session and is handled as part of stream configuration.
10757 */
10758 uint8_t tnr_enable = 0;
10759
10760 if (m_bTnrPreview || m_bTnrVideo) {
10761
10762 switch (type) {
10763 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10764 tnr_enable = 1;
10765 break;
10766
10767 default:
10768 tnr_enable = 0;
10769 break;
10770 }
10771
10772 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10773 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10774 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10775
10776 LOGD("TNR:%d with process plate %d for template:%d",
10777 tnr_enable, tnr_process_type, type);
10778 }
10779
10780 //Update Link tags to default
10781 int32_t sync_type = CAM_TYPE_STANDALONE;
10782 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10783
10784 int32_t is_main = 0; //this doesn't matter as app should overwrite
10785 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10786
10787 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10788
10789 /* CDS default */
10790 char prop[PROPERTY_VALUE_MAX];
10791 memset(prop, 0, sizeof(prop));
10792 property_get("persist.camera.CDS", prop, "Auto");
10793 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10794 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10795 if (CAM_CDS_MODE_MAX == cds_mode) {
10796 cds_mode = CAM_CDS_MODE_AUTO;
10797 }
10798
10799 /* Disabling CDS in templates which have TNR enabled*/
10800 if (tnr_enable)
10801 cds_mode = CAM_CDS_MODE_OFF;
10802
10803 int32_t mode = cds_mode;
10804 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010805
Thierry Strudel269c81a2016-10-12 12:13:59 -070010806 /* Manual Convergence AEC Speed is disabled by default*/
10807 float default_aec_speed = 0;
10808 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10809
10810 /* Manual Convergence AWB Speed is disabled by default*/
10811 float default_awb_speed = 0;
10812 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10813
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010814 // Set instant AEC to normal convergence by default
10815 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10816 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10817
Shuzhen Wang19463d72016-03-08 11:09:52 -080010818 /* hybrid ae */
10819 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10820
Thierry Strudel3d639192016-09-09 11:52:26 -070010821 mDefaultMetadata[type] = settings.release();
10822
10823 return mDefaultMetadata[type];
10824}
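/* Usage sketch (assumption, not quoted from this file): the framework retrieves the template
 * built above through the standard HAL3 entry point, and the HAL keeps ownership of the buffer
 * cached in mDefaultMetadata[type], e.g.
 *
 *   const camera_metadata_t *defaults =
 *           device->ops->construct_default_request_settings(device, CAMERA3_TEMPLATE_PREVIEW);
 *   // 'defaults' is owned by the HAL and stays valid until the camera device is closed
 */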
10825
10826/*===========================================================================
10827 * FUNCTION : setFrameParameters
10828 *
10829 * DESCRIPTION: set parameters per frame as requested in the metadata from
10830 * framework
10831 *
10832 * PARAMETERS :
10833 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010834 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010835 * @blob_request: Whether this request is a blob request or not
10836 *
10837 * RETURN : success: NO_ERROR
10838 * failure:
10839 *==========================================================================*/
10840int QCamera3HardwareInterface::setFrameParameters(
10841 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010842 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010843 int blob_request,
10844 uint32_t snapshotStreamId)
10845{
10846 /*translate from camera_metadata_t type to parm_type_t*/
10847 int rc = 0;
10848 int32_t hal_version = CAM_HAL_V3;
10849
10850 clear_metadata_buffer(mParameters);
10851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10852 LOGE("Failed to set hal version in the parameters");
10853 return BAD_VALUE;
10854 }
10855
10856 /*we need to update the frame number in the parameters*/
10857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10858 request->frame_number)) {
10859 LOGE("Failed to set the frame number in the parameters");
10860 return BAD_VALUE;
10861 }
10862
10863 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010865 LOGE("Failed to set stream type mask in the parameters");
10866 return BAD_VALUE;
10867 }
10868
10869 if (mUpdateDebugLevel) {
10870 uint32_t dummyDebugLevel = 0;
10871 /* The value of dummyDebugLevel is irrelevant. On
10872 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
10873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10874 dummyDebugLevel)) {
10875 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10876 return BAD_VALUE;
10877 }
10878 mUpdateDebugLevel = false;
10879 }
10880
10881 if(request->settings != NULL){
10882 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10883 if (blob_request)
10884 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10885 }
10886
10887 return rc;
10888}
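/* Call sketch (illustrative; the surrounding flow is an assumption, not quoted from this file):
 * processCaptureRequest() is expected to invoke this once per request, after collecting the
 * stream IDs of the requested buffers and before pushing mParameters to the backend:
 *
 *   cam_stream_ID_t streamsArray;
 *   memset(&streamsArray, 0, sizeof(streamsArray));
 *   // ... populate streamsArray from the streams referenced by request->output_buffers ...
 *   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
 */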
10889
10890/*===========================================================================
10891 * FUNCTION : setReprocParameters
10892 *
10893 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
10894 * return it.
10895 *
10896 * PARAMETERS :
10897 * @request : request that needs to be serviced
10898 *
10899 * RETURN : success: NO_ERROR
10900 * failure:
10901 *==========================================================================*/
10902int32_t QCamera3HardwareInterface::setReprocParameters(
10903 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10904 uint32_t snapshotStreamId)
10905{
10906 /*translate from camera_metadata_t type to parm_type_t*/
10907 int rc = 0;
10908
10909 if (NULL == request->settings){
10910 LOGE("Reprocess settings cannot be NULL");
10911 return BAD_VALUE;
10912 }
10913
10914 if (NULL == reprocParam) {
10915 LOGE("Invalid reprocessing metadata buffer");
10916 return BAD_VALUE;
10917 }
10918 clear_metadata_buffer(reprocParam);
10919
10920 /*we need to update the frame number in the parameters*/
10921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10922 request->frame_number)) {
10923 LOGE("Failed to set the frame number in the parameters");
10924 return BAD_VALUE;
10925 }
10926
10927 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10928 if (rc < 0) {
10929 LOGE("Failed to translate reproc request");
10930 return rc;
10931 }
10932
10933 CameraMetadata frame_settings;
10934 frame_settings = request->settings;
10935 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10936 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10937 int32_t *crop_count =
10938 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10939 int32_t *crop_data =
10940 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10941 int32_t *roi_map =
10942 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10943 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10944 cam_crop_data_t crop_meta;
10945 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10946 crop_meta.num_of_streams = 1;
10947 crop_meta.crop_info[0].crop.left = crop_data[0];
10948 crop_meta.crop_info[0].crop.top = crop_data[1];
10949 crop_meta.crop_info[0].crop.width = crop_data[2];
10950 crop_meta.crop_info[0].crop.height = crop_data[3];
10951
10952 crop_meta.crop_info[0].roi_map.left =
10953 roi_map[0];
10954 crop_meta.crop_info[0].roi_map.top =
10955 roi_map[1];
10956 crop_meta.crop_info[0].roi_map.width =
10957 roi_map[2];
10958 crop_meta.crop_info[0].roi_map.height =
10959 roi_map[3];
10960
10961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10962 rc = BAD_VALUE;
10963 }
10964 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10965 request->input_buffer->stream,
10966 crop_meta.crop_info[0].crop.left,
10967 crop_meta.crop_info[0].crop.top,
10968 crop_meta.crop_info[0].crop.width,
10969 crop_meta.crop_info[0].crop.height);
10970 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10971 request->input_buffer->stream,
10972 crop_meta.crop_info[0].roi_map.left,
10973 crop_meta.crop_info[0].roi_map.top,
10974 crop_meta.crop_info[0].roi_map.width,
10975 crop_meta.crop_info[0].roi_map.height);
10976 } else {
10977 LOGE("Invalid reprocess crop count %d!", *crop_count);
10978 }
10979 } else {
10980 LOGE("No crop data from matching output stream");
10981 }
10982
10983 /* These settings are not needed for regular requests so handle them specially for
10984 reprocess requests; information needed for EXIF tags */
10985 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10986 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10987 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10988 if (NAME_NOT_FOUND != val) {
10989 uint32_t flashMode = (uint32_t)val;
10990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10991 rc = BAD_VALUE;
10992 }
10993 } else {
10994 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10995 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10996 }
10997 } else {
10998 LOGH("No flash mode in reprocess settings");
10999 }
11000
11001 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11002 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11003 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11004 rc = BAD_VALUE;
11005 }
11006 } else {
11007 LOGH("No flash state in reprocess settings");
11008 }
11009
11010 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11011 uint8_t *reprocessFlags =
11012 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11014 *reprocessFlags)) {
11015 rc = BAD_VALUE;
11016 }
11017 }
11018
Thierry Strudel54dc9782017-02-15 12:12:10 -080011019 // Add exif debug data to internal metadata
11020 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11021 mm_jpeg_debug_exif_params_t *debug_params =
11022 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11023 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11024 // AE
11025 if (debug_params->ae_debug_params_valid == TRUE) {
11026 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11027 debug_params->ae_debug_params);
11028 }
11029 // AWB
11030 if (debug_params->awb_debug_params_valid == TRUE) {
11031 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11032 debug_params->awb_debug_params);
11033 }
11034 // AF
11035 if (debug_params->af_debug_params_valid == TRUE) {
11036 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11037 debug_params->af_debug_params);
11038 }
11039 // ASD
11040 if (debug_params->asd_debug_params_valid == TRUE) {
11041 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11042 debug_params->asd_debug_params);
11043 }
11044 // Stats
11045 if (debug_params->stats_debug_params_valid == TRUE) {
11046 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11047 debug_params->stats_debug_params);
11048 }
11049 // BE Stats
11050 if (debug_params->bestats_debug_params_valid == TRUE) {
11051 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11052 debug_params->bestats_debug_params);
11053 }
11054 // BHIST
11055 if (debug_params->bhist_debug_params_valid == TRUE) {
11056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11057 debug_params->bhist_debug_params);
11058 }
11059 // 3A Tuning
11060 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11061 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11062 debug_params->q3a_tuning_debug_params);
11063 }
11064 }
11065
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011066 // Add metadata which reprocess needs
11067 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11068 cam_reprocess_info_t *repro_info =
11069 (cam_reprocess_info_t *)frame_settings.find
11070 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011071 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011072 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011073 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011074 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011076 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011077 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011078 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011079 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011080 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011081 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011082 repro_info->pipeline_flip);
11083 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11084 repro_info->af_roi);
11085 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11086 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011087 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11088 CAM_INTF_PARM_ROTATION metadata has already been added in
11089 translateToHalMetadata, and HAL needs to keep this new rotation
11090 metadata. Otherwise, the old rotation info saved in the vendor tag
11091 is used */
11092 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11093 CAM_INTF_PARM_ROTATION, reprocParam) {
11094 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11095 } else {
11096 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011097 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011098 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011099 }
11100
11101 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11102 to request cropping and QCAMERA3_JPEG_ENCODE_CROP_ROI for downscale/upscale during HW JPEG
11103 encoding; roi.width and roi.height become the final JPEG size.
11104 For now, HAL only checks these tags for reprocess requests */
11105 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11106 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11107 uint8_t *enable =
11108 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11109 if (*enable == TRUE) {
11110 int32_t *crop_data =
11111 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11112 cam_stream_crop_info_t crop_meta;
11113 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11114 crop_meta.stream_id = 0;
11115 crop_meta.crop.left = crop_data[0];
11116 crop_meta.crop.top = crop_data[1];
11117 crop_meta.crop.width = crop_data[2];
11118 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011119 // The JPEG crop roi should match cpp output size
11120 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11121 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11122 crop_meta.roi_map.left = 0;
11123 crop_meta.roi_map.top = 0;
11124 crop_meta.roi_map.width = cpp_crop->crop.width;
11125 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011126 }
11127 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11128 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011129 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011130 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011131 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11132 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011133 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011134 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11135
11136 // Add JPEG scale information
11137 cam_dimension_t scale_dim;
11138 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11139 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11140 int32_t *roi =
11141 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11142 scale_dim.width = roi[2];
11143 scale_dim.height = roi[3];
11144 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11145 scale_dim);
11146 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11147 scale_dim.width, scale_dim.height, mCameraId);
11148 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011149 }
11150 }
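    /* Illustrative sketch (not part of this HAL): one way a client could populate the vendor
     * tags consumed above. The tag names and data layouts come from the code above; the
     * CameraMetadata object 'reproSettings' and the concrete sizes are assumptions.
     *
     *   CameraMetadata reproSettings;
     *   uint8_t cropEnable = TRUE;
     *   int32_t cropRect[4] = {0, 0, 3840, 2160};   // left, top, width, height to crop
     *   int32_t cropRoi[4]  = {0, 0, 1920, 1080};   // roi[2]/roi[3] become the final JPEG size
     *   reproSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
     *   reproSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   reproSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */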
11151
11152 return rc;
11153}
11154
11155/*===========================================================================
11156 * FUNCTION : saveRequestSettings
11157 *
11158 * DESCRIPTION: Add any settings that might have changed to the request settings
11159 * and save the settings to be applied on the frame
11160 *
11161 * PARAMETERS :
11162 * @jpegMetadata : the extracted and/or modified jpeg metadata
11163 * @request : request with initial settings
11164 *
11165 * RETURN :
11166 * camera_metadata_t* : pointer to the saved request settings
11167 *==========================================================================*/
11168camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11169 const CameraMetadata &jpegMetadata,
11170 camera3_capture_request_t *request)
11171{
11172 camera_metadata_t *resultMetadata;
11173 CameraMetadata camMetadata;
11174 camMetadata = request->settings;
11175
11176 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11177 int32_t thumbnail_size[2];
11178 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11179 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11180 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11181 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11182 }
11183
11184 if (request->input_buffer != NULL) {
11185 uint8_t reprocessFlags = 1;
11186 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11187 (uint8_t*)&reprocessFlags,
11188 sizeof(reprocessFlags));
11189 }
11190
11191 resultMetadata = camMetadata.release();
11192 return resultMetadata;
11193}
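/* Ownership note with a usage sketch (the call site shown is an assumption, not quoted from
 * this file): camMetadata.release() hands the underlying buffer to the caller, so whoever
 * stores the returned pointer must eventually free it, e.g.
 *
 *   camera_metadata_t *saved = saveRequestSettings(jpegMetadata, request);
 *   // ... keep 'saved' with the pending request until the capture result is assembled ...
 *   free_camera_metadata(saved);
 */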
11194
11195/*===========================================================================
11196 * FUNCTION : setHalFpsRange
11197 *
11198 * DESCRIPTION: set FPS range parameter
11199 *
11200 *
11201 * PARAMETERS :
11202 * @settings : Metadata from framework
11203 * @hal_metadata: Metadata buffer
11204 *
11205 *
11206 * RETURN : success: NO_ERROR
11207 * failure:
11208 *==========================================================================*/
11209int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11210 metadata_buffer_t *hal_metadata)
11211{
11212 int32_t rc = NO_ERROR;
11213 cam_fps_range_t fps_range;
11214 fps_range.min_fps = (float)
11215 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11216 fps_range.max_fps = (float)
11217 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11218 fps_range.video_min_fps = fps_range.min_fps;
11219 fps_range.video_max_fps = fps_range.max_fps;
11220
11221 LOGD("aeTargetFpsRange fps: [%f %f]",
11222 fps_range.min_fps, fps_range.max_fps);
11223 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11224 * follows:
11225 * ---------------------------------------------------------------|
11226 * Video stream is absent in configure_streams |
11227 * (Camcorder preview before the first video record |
11228 * ---------------------------------------------------------------|
11229 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11230 * | | | vid_min/max_fps|
11231 * ---------------------------------------------------------------|
11232 * NO | [ 30, 240] | 240 | [240, 240] |
11233 * |-------------|-------------|----------------|
11234 * | [240, 240] | 240 | [240, 240] |
11235 * ---------------------------------------------------------------|
11236 * Video stream is present in configure_streams |
11237 * ---------------------------------------------------------------|
11238 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11239 * | | | vid_min/max_fps|
11240 * ---------------------------------------------------------------|
11241 * NO | [ 30, 240] | 240 | [240, 240] |
11242 * (camcorder prev |-------------|-------------|----------------|
11243 * after video rec | [240, 240] | 240 | [240, 240] |
11244 * is stopped) | | | |
11245 * ---------------------------------------------------------------|
11246 * YES | [ 30, 240] | 240 | [240, 240] |
11247 * |-------------|-------------|----------------|
11248 * | [240, 240] | 240 | [240, 240] |
11249 * ---------------------------------------------------------------|
11250 * When Video stream is absent in configure_streams,
11251 * preview fps = sensor_fps / batchsize
11252 * Eg: for 240fps at batchSize 4, preview = 60fps
11253 * for 120fps at batchSize 4, preview = 30fps
11254 *
11255 * When video stream is present in configure_streams, preview fps is as per
11256 * the ratio of preview buffers to video buffers requested in process
11257 * capture request
11258 */
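    /* Worked example (illustrative, consistent with the table above): in
     * CONSTRAINED_HIGH_SPEED_MODE with aeTargetFpsRange = [120, 120], the code below forces
     * min_fps and video_min_fps to 120, maps 120 to the matching HFR_MODE_MAP entry, and sets
     * mBatchSize = 120 / PREVIEW_FPS_FOR_HFR (4 when that constant is 30, matching the
     * "120fps at batchSize 4" example above), capped at MAX_HFR_BATCH_SIZE.
     */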
11259 mBatchSize = 0;
11260 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11261 fps_range.min_fps = fps_range.video_max_fps;
11262 fps_range.video_min_fps = fps_range.video_max_fps;
11263 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11264 fps_range.max_fps);
11265 if (NAME_NOT_FOUND != val) {
11266 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11268 return BAD_VALUE;
11269 }
11270
11271 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11272 /* If batchmode is currently in progress and the fps changes,
11273 * set the flag to restart the sensor */
11274 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11275 (mHFRVideoFps != fps_range.max_fps)) {
11276 mNeedSensorRestart = true;
11277 }
11278 mHFRVideoFps = fps_range.max_fps;
11279 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11280 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11281 mBatchSize = MAX_HFR_BATCH_SIZE;
11282 }
11283 }
11284 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11285
11286 }
11287 } else {
11288 /* HFR mode is session param in backend/ISP. This should be reset when
11289 * in non-HFR mode */
11290 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11292 return BAD_VALUE;
11293 }
11294 }
11295 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11296 return BAD_VALUE;
11297 }
11298 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11299 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11300 return rc;
11301}
11302
11303/*===========================================================================
11304 * FUNCTION : translateToHalMetadata
11305 *
11306 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11307 *
11308 *
11309 * PARAMETERS :
11310 * @request : request sent from framework
11311 *
11312 *
11313 * RETURN : success: NO_ERROR
11314 * failure:
11315 *==========================================================================*/
11316int QCamera3HardwareInterface::translateToHalMetadata
11317 (const camera3_capture_request_t *request,
11318 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011319 uint32_t snapshotStreamId) {
11320 if (request == nullptr || hal_metadata == nullptr) {
11321 return BAD_VALUE;
11322 }
11323
11324 int64_t minFrameDuration = getMinFrameDuration(request);
11325
11326 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11327 minFrameDuration);
11328}
11329
11330int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11331 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11332 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11333
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 int rc = 0;
11335 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011336 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011337
11338 /* Do not change the order of the following list unless you know what you are
11339 * doing.
11340 * The order is laid out in such a way that parameters in the front of the table
11341 * may be used to override the parameters later in the table. Examples are:
11342 * 1. META_MODE should precede AEC/AWB/AF MODE
11343 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11344 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11345 * 4. Any mode should precede its corresponding settings
11346 */
11347 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11348 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11350 rc = BAD_VALUE;
11351 }
11352 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11353 if (rc != NO_ERROR) {
11354 LOGE("extractSceneMode failed");
11355 }
11356 }
11357
11358 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11359 uint8_t fwk_aeMode =
11360 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11361 uint8_t aeMode;
11362 int32_t redeye;
11363
11364 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11365 aeMode = CAM_AE_MODE_OFF;
11366 } else {
11367 aeMode = CAM_AE_MODE_ON;
11368 }
11369 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11370 redeye = 1;
11371 } else {
11372 redeye = 0;
11373 }
11374
11375 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11376 fwk_aeMode);
11377 if (NAME_NOT_FOUND != val) {
11378 int32_t flashMode = (int32_t)val;
11379 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11380 }
11381
11382 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11384 rc = BAD_VALUE;
11385 }
11386 }
11387
11388 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11389 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11390 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11391 fwk_whiteLevel);
11392 if (NAME_NOT_FOUND != val) {
11393 uint8_t whiteLevel = (uint8_t)val;
11394 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11395 rc = BAD_VALUE;
11396 }
11397 }
11398 }
11399
11400 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11401 uint8_t fwk_cacMode =
11402 frame_settings.find(
11403 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11404 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11405 fwk_cacMode);
11406 if (NAME_NOT_FOUND != val) {
11407 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11408 bool entryAvailable = FALSE;
11409 // Check whether Frameworks set CAC mode is supported in device or not
11410 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11411 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11412 entryAvailable = TRUE;
11413 break;
11414 }
11415 }
11416 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11417 // If the entry is not found, set the device-supported mode instead of the framework's mode, i.e.,
11418 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11419 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11420 if (entryAvailable == FALSE) {
11421 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11422 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11423 } else {
11424 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11425 // High is not supported, so set FAST since the spec says the underlying
11426 // device implementation can be the same for both modes.
11427 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11428 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11429 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11430 // in order to avoid the fps drop due to high quality
11431 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11432 } else {
11433 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11434 }
11435 }
11436 }
11437 LOGD("Final cacMode is %d", cacMode);
11438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11439 rc = BAD_VALUE;
11440 }
11441 } else {
11442 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11443 }
11444 }
11445
Thierry Strudel2896d122017-02-23 19:18:03 -080011446 char af_value[PROPERTY_VALUE_MAX];
11447 property_get("persist.camera.af.infinity", af_value, "0");
11448
Jason Lee84ae9972017-02-24 13:24:24 -080011449 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011450 if (atoi(af_value) == 0) {
11451 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011452 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011453 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11454 fwk_focusMode);
11455 if (NAME_NOT_FOUND != val) {
11456 uint8_t focusMode = (uint8_t)val;
11457 LOGD("set focus mode %d", focusMode);
11458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11459 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11460 rc = BAD_VALUE;
11461 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011462 }
11463 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011464 } else {
11465 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11466 LOGE("Focus forced to infinity %d", focusMode);
11467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11468 rc = BAD_VALUE;
11469 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011470 }
11471
Jason Lee84ae9972017-02-24 13:24:24 -080011472 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11473 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011474 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11476 focalDistance)) {
11477 rc = BAD_VALUE;
11478 }
11479 }
11480
11481 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11482 uint8_t fwk_antibandingMode =
11483 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11484 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11485 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11486 if (NAME_NOT_FOUND != val) {
11487 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011488 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11489 if (m60HzZone) {
11490 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11491 } else {
11492 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11493 }
11494 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11496 hal_antibandingMode)) {
11497 rc = BAD_VALUE;
11498 }
11499 }
11500 }
11501
11502 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11503 int32_t expCompensation = frame_settings.find(
11504 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11505 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11506 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11507 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11508 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011509 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11511 expCompensation)) {
11512 rc = BAD_VALUE;
11513 }
11514 }
11515
11516 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11517 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11519 rc = BAD_VALUE;
11520 }
11521 }
11522 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11523 rc = setHalFpsRange(frame_settings, hal_metadata);
11524 if (rc != NO_ERROR) {
11525 LOGE("setHalFpsRange failed");
11526 }
11527 }
11528
11529 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11530 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11532 rc = BAD_VALUE;
11533 }
11534 }
11535
11536 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11537 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11538 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11539 fwk_effectMode);
11540 if (NAME_NOT_FOUND != val) {
11541 uint8_t effectMode = (uint8_t)val;
11542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11543 rc = BAD_VALUE;
11544 }
11545 }
11546 }
11547
11548 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11549 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11551 colorCorrectMode)) {
11552 rc = BAD_VALUE;
11553 }
11554 }
11555
11556 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11557 cam_color_correct_gains_t colorCorrectGains;
11558 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11559 colorCorrectGains.gains[i] =
11560 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11561 }
11562 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11563 colorCorrectGains)) {
11564 rc = BAD_VALUE;
11565 }
11566 }
11567
11568 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11569 cam_color_correct_matrix_t colorCorrectTransform;
11570 cam_rational_type_t transform_elem;
11571 size_t num = 0;
11572 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11573 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11574 transform_elem.numerator =
11575 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11576 transform_elem.denominator =
11577 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11578 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11579 num++;
11580 }
11581 }
11582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11583 colorCorrectTransform)) {
11584 rc = BAD_VALUE;
11585 }
11586 }
11587
11588 cam_trigger_t aecTrigger;
11589 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11590 aecTrigger.trigger_id = -1;
11591 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11592 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11593 aecTrigger.trigger =
11594 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11595 aecTrigger.trigger_id =
11596 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11598 aecTrigger)) {
11599 rc = BAD_VALUE;
11600 }
11601 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11602 aecTrigger.trigger, aecTrigger.trigger_id);
11603 }
11604
11605 /*af_trigger must come with a trigger id*/
11606 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11607 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11608 cam_trigger_t af_trigger;
11609 af_trigger.trigger =
11610 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11611 af_trigger.trigger_id =
11612 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11614 rc = BAD_VALUE;
11615 }
11616 LOGD("AfTrigger: %d AfTriggerID: %d",
11617 af_trigger.trigger, af_trigger.trigger_id);
11618 }
11619
11620 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11621 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11623 rc = BAD_VALUE;
11624 }
11625 }
11626 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11627 cam_edge_application_t edge_application;
11628 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011629
Thierry Strudel3d639192016-09-09 11:52:26 -070011630 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11631 edge_application.sharpness = 0;
11632 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011633 edge_application.sharpness =
11634 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11635 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11636 int32_t sharpness =
11637 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11638 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11639 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11640 LOGD("Setting edge mode sharpness %d", sharpness);
11641 edge_application.sharpness = sharpness;
11642 }
11643 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011644 }
11645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11646 rc = BAD_VALUE;
11647 }
11648 }
11649
11650 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11651 int32_t respectFlashMode = 1;
11652 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11653 uint8_t fwk_aeMode =
11654 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11655 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11656 respectFlashMode = 0;
11657 LOGH("AE Mode controls flash, ignore android.flash.mode");
11658 }
11659 }
11660 if (respectFlashMode) {
11661 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11662 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11663 LOGH("flash mode after mapping %d", val);
11664 // To check: CAM_INTF_META_FLASH_MODE usage
11665 if (NAME_NOT_FOUND != val) {
11666 uint8_t flashMode = (uint8_t)val;
11667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11668 rc = BAD_VALUE;
11669 }
11670 }
11671 }
11672 }
11673
11674 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11675 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11676 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11677 rc = BAD_VALUE;
11678 }
11679 }
11680
11681 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11682 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11683 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11684 flashFiringTime)) {
11685 rc = BAD_VALUE;
11686 }
11687 }
11688
11689 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11690 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11692 hotPixelMode)) {
11693 rc = BAD_VALUE;
11694 }
11695 }
11696
11697 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11698 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11700 lensAperture)) {
11701 rc = BAD_VALUE;
11702 }
11703 }
11704
11705 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11706 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11708 filterDensity)) {
11709 rc = BAD_VALUE;
11710 }
11711 }
11712
11713 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11714 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11716 focalLength)) {
11717 rc = BAD_VALUE;
11718 }
11719 }
11720
11721 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11722 uint8_t optStabMode =
11723 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11725 optStabMode)) {
11726 rc = BAD_VALUE;
11727 }
11728 }
11729
11730 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11731 uint8_t videoStabMode =
11732 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11733 LOGD("videoStabMode from APP = %d", videoStabMode);
11734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
11735 videoStabMode)) {
11736 rc = BAD_VALUE;
11737 }
11738 }
11739
11740
11741 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11742 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11744 noiseRedMode)) {
11745 rc = BAD_VALUE;
11746 }
11747 }
11748
11749 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11750 float reprocessEffectiveExposureFactor =
11751 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11753 reprocessEffectiveExposureFactor)) {
11754 rc = BAD_VALUE;
11755 }
11756 }
11757
11758 cam_crop_region_t scalerCropRegion;
11759 bool scalerCropSet = false;
11760 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11761 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11762 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11763 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11764 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11765
11766 // Map coordinate system from active array to sensor output.
11767 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11768 scalerCropRegion.width, scalerCropRegion.height);
11769
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11771 scalerCropRegion)) {
11772 rc = BAD_VALUE;
11773 }
11774 scalerCropSet = true;
11775 }
11776
11777 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11778 int64_t sensorExpTime =
11779 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11780 LOGD("setting sensorExpTime %lld", sensorExpTime);
11781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11782 sensorExpTime)) {
11783 rc = BAD_VALUE;
11784 }
11785 }
11786
11787 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11788 int64_t sensorFrameDuration =
11789 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011790 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11791 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11792 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11793 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11795 sensorFrameDuration)) {
11796 rc = BAD_VALUE;
11797 }
11798 }
11799
11800 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11801 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11802 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11803 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11804 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11805 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11806 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11808 sensorSensitivity)) {
11809 rc = BAD_VALUE;
11810 }
11811 }
11812
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011813#ifndef USE_HAL_3_3
11814 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11815 int32_t ispSensitivity =
11816 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11817 if (ispSensitivity <
11818 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11819 ispSensitivity =
11820 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11821 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11822 }
11823 if (ispSensitivity >
11824 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11825 ispSensitivity =
11826 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11827 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11828 }
11829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11830 ispSensitivity)) {
11831 rc = BAD_VALUE;
11832 }
11833 }
11834#endif
11835
Thierry Strudel3d639192016-09-09 11:52:26 -070011836 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11837 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11839 rc = BAD_VALUE;
11840 }
11841 }
11842
11843 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11844 uint8_t fwk_facedetectMode =
11845 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11846
11847 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11848 fwk_facedetectMode);
11849
11850 if (NAME_NOT_FOUND != val) {
11851 uint8_t facedetectMode = (uint8_t)val;
11852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11853 facedetectMode)) {
11854 rc = BAD_VALUE;
11855 }
11856 }
11857 }
11858
Thierry Strudel54dc9782017-02-15 12:12:10 -080011859 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011860 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011861 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11863 histogramMode)) {
11864 rc = BAD_VALUE;
11865 }
11866 }
11867
11868 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11869 uint8_t sharpnessMapMode =
11870 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11872 sharpnessMapMode)) {
11873 rc = BAD_VALUE;
11874 }
11875 }
11876
11877 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11878 uint8_t tonemapMode =
11879 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11881 rc = BAD_VALUE;
11882 }
11883 }
11884 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11885 /*All tonemap channels will have the same number of points*/
11886 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11887 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11888 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11889 cam_rgb_tonemap_curves tonemapCurves;
11890 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11891 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11892 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11893 tonemapCurves.tonemap_points_cnt,
11894 CAM_MAX_TONEMAP_CURVE_SIZE);
11895 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11896 }
11897
11898 /* ch0 = G*/
11899 size_t point = 0;
11900 cam_tonemap_curve_t tonemapCurveGreen;
11901 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11902 for (size_t j = 0; j < 2; j++) {
11903 tonemapCurveGreen.tonemap_points[i][j] =
11904 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11905 point++;
11906 }
11907 }
11908 tonemapCurves.curves[0] = tonemapCurveGreen;
11909
11910 /* ch 1 = B */
11911 point = 0;
11912 cam_tonemap_curve_t tonemapCurveBlue;
11913 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11914 for (size_t j = 0; j < 2; j++) {
11915 tonemapCurveBlue.tonemap_points[i][j] =
11916 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11917 point++;
11918 }
11919 }
11920 tonemapCurves.curves[1] = tonemapCurveBlue;
11921
11922 /* ch 2 = R */
11923 point = 0;
11924 cam_tonemap_curve_t tonemapCurveRed;
11925 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11926 for (size_t j = 0; j < 2; j++) {
11927 tonemapCurveRed.tonemap_points[i][j] =
11928 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11929 point++;
11930 }
11931 }
11932 tonemapCurves.curves[2] = tonemapCurveRed;
11933
11934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11935 tonemapCurves)) {
11936 rc = BAD_VALUE;
11937 }
11938 }
11939
11940 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11941 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11943 captureIntent)) {
11944 rc = BAD_VALUE;
11945 }
11946 }
11947
11948 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11949 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11951 blackLevelLock)) {
11952 rc = BAD_VALUE;
11953 }
11954 }
11955
11956 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11957 uint8_t lensShadingMapMode =
11958 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11960 lensShadingMapMode)) {
11961 rc = BAD_VALUE;
11962 }
11963 }
11964
11965 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11966 cam_area_t roi;
11967 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011968 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011969
11970 // Map coordinate system from active array to sensor output.
11971 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11972 roi.rect.height);
11973
11974 if (scalerCropSet) {
11975 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11976 }
11977 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11978 rc = BAD_VALUE;
11979 }
11980 }
11981
11982 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11983 cam_area_t roi;
11984 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011985 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011986
11987 // Map coordinate system from active array to sensor output.
11988 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11989 roi.rect.height);
11990
11991 if (scalerCropSet) {
11992 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11993 }
11994 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11995 rc = BAD_VALUE;
11996 }
11997 }
11998
11999 // CDS for non-HFR non-video mode
12000 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12001 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12002 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12003 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12004 LOGE("Invalid CDS mode %d!", *fwk_cds);
12005 } else {
12006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12007 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011 }
12012
Thierry Strudel04e026f2016-10-10 11:27:36 -070012013 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012014 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012015 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012016 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12017 }
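    // m_bVideoHdrEnabled, when set, forces video HDR on regardless of the
    // per-request vendor tag value.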
12018 if (m_bVideoHdrEnabled)
12019 vhdr = CAM_VIDEO_HDR_MODE_ON;
12020
Thierry Strudel54dc9782017-02-15 12:12:10 -080012021 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12022
12023 if(vhdr != curr_hdr_state)
12024 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12025
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012026 rc = setVideoHdrMode(mParameters, vhdr);
12027 if (rc != NO_ERROR) {
12028        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012029 }
12030
12031 //IR
12032 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12033 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12034 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012035 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12036 uint8_t isIRon = 0;
12037
12038        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012039 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12040 LOGE("Invalid IR mode %d!", fwk_ir);
12041 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012042 if(isIRon != curr_ir_state )
12043 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12044
Thierry Strudel04e026f2016-10-10 11:27:36 -070012045 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12046 CAM_INTF_META_IR_MODE, fwk_ir)) {
12047 rc = BAD_VALUE;
12048 }
12049 }
12050 }
12051
Thierry Strudel54dc9782017-02-15 12:12:10 -080012052 //Binning Correction Mode
12053 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12054 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12055 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12056 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12057 || (0 > fwk_binning_correction)) {
12058 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12059 } else {
12060 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12061 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12062 rc = BAD_VALUE;
12063 }
12064 }
12065 }
12066
Thierry Strudel269c81a2016-10-12 12:13:59 -070012067 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12068 float aec_speed;
12069 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12070 LOGD("AEC Speed :%f", aec_speed);
12071 if ( aec_speed < 0 ) {
12072            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12073 } else {
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12075 aec_speed)) {
12076 rc = BAD_VALUE;
12077 }
12078 }
12079 }
12080
12081 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12082 float awb_speed;
12083 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12084 LOGD("AWB Speed :%f", awb_speed);
12085 if ( awb_speed < 0 ) {
12086            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12087 } else {
12088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12089 awb_speed)) {
12090 rc = BAD_VALUE;
12091 }
12092 }
12093 }
12094
Thierry Strudel3d639192016-09-09 11:52:26 -070012095 // TNR
12096 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12097 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12098 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012099 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012100 cam_denoise_param_t tnr;
12101 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12102 tnr.process_plates =
12103 (cam_denoise_process_type_t)frame_settings.find(
12104 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12105 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012106
12107 if(b_TnrRequested != curr_tnr_state)
12108 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12109
Thierry Strudel3d639192016-09-09 11:52:26 -070012110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114
Thierry Strudel54dc9782017-02-15 12:12:10 -080012115 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012116 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012117 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12119 *exposure_metering_mode)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123
Thierry Strudel3d639192016-09-09 11:52:26 -070012124 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12125 int32_t fwk_testPatternMode =
12126 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12127 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12128 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12129
12130 if (NAME_NOT_FOUND != testPatternMode) {
12131 cam_test_pattern_data_t testPatternData;
12132 memset(&testPatternData, 0, sizeof(testPatternData));
12133 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12134 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12135 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12136 int32_t *fwk_testPatternData =
12137 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12138 testPatternData.r = fwk_testPatternData[0];
12139 testPatternData.b = fwk_testPatternData[3];
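                // The framework supplies the solid-color pattern as
                // [R, G_even, G_odd, B]; the switch below maps the two green
                // samples onto Gr/Gb according to the sensor's CFA layout.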
12140 switch (gCamCapability[mCameraId]->color_arrangement) {
12141 case CAM_FILTER_ARRANGEMENT_RGGB:
12142 case CAM_FILTER_ARRANGEMENT_GRBG:
12143 testPatternData.gr = fwk_testPatternData[1];
12144 testPatternData.gb = fwk_testPatternData[2];
12145 break;
12146 case CAM_FILTER_ARRANGEMENT_GBRG:
12147 case CAM_FILTER_ARRANGEMENT_BGGR:
12148 testPatternData.gr = fwk_testPatternData[2];
12149 testPatternData.gb = fwk_testPatternData[1];
12150 break;
12151 default:
12152 LOGE("color arrangement %d is not supported",
12153 gCamCapability[mCameraId]->color_arrangement);
12154 break;
12155 }
12156 }
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12158 testPatternData)) {
12159 rc = BAD_VALUE;
12160 }
12161 } else {
12162 LOGE("Invalid framework sensor test pattern mode %d",
12163 fwk_testPatternMode);
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12168 size_t count = 0;
12169 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12170 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12171 gps_coords.data.d, gps_coords.count, count);
12172 if (gps_coords.count != count) {
12173 rc = BAD_VALUE;
12174 }
12175 }
12176
12177 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12178 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12179 size_t count = 0;
12180 const char *gps_methods_src = (const char *)
12181 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12182 memset(gps_methods, '\0', sizeof(gps_methods));
12183 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12184 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12185 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12186 if (GPS_PROCESSING_METHOD_SIZE != count) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12192 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12194 gps_timestamp)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12200 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12201 cam_rotation_info_t rotation_info;
12202 if (orientation == 0) {
12203 rotation_info.rotation = ROTATE_0;
12204 } else if (orientation == 90) {
12205 rotation_info.rotation = ROTATE_90;
12206 } else if (orientation == 180) {
12207 rotation_info.rotation = ROTATE_180;
12208 } else if (orientation == 270) {
12209 rotation_info.rotation = ROTATE_270;
12210 }
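        // ANDROID_JPEG_ORIENTATION is restricted by the framework to
        // 0/90/180/270, so no default branch is needed here.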
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012211 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012212 rotation_info.streamId = snapshotStreamId;
12213 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12215 rc = BAD_VALUE;
12216 }
12217 }
12218
12219 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12220 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225
12226 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12227 uint32_t thumb_quality = (uint32_t)
12228 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12230 thumb_quality)) {
12231 rc = BAD_VALUE;
12232 }
12233 }
12234
12235 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12236 cam_dimension_t dim;
12237 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12238 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12240 rc = BAD_VALUE;
12241 }
12242 }
12243
12244 // Internal metadata
12245 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12246 size_t count = 0;
12247 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12248 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12249 privatedata.data.i32, privatedata.count, count);
12250 if (privatedata.count != count) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012255 // ISO/Exposure Priority
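    // Requests that select ISO or exposure-time priority also enable ZSL;
    // requests without these vendor tags explicitly disable it.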
12256 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12257 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12258 cam_priority_mode_t mode =
12259 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12260 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12261 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12262 use_iso_exp_pty.previewOnly = FALSE;
12263 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12264 use_iso_exp_pty.value = *ptr;
12265
12266 if(CAM_ISO_PRIORITY == mode) {
12267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12268 use_iso_exp_pty)) {
12269 rc = BAD_VALUE;
12270 }
12271 }
12272 else {
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12274 use_iso_exp_pty)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012278
12279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12280 rc = BAD_VALUE;
12281 }
12282 }
12283 } else {
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12285 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012286 }
12287 }
12288
12289 // Saturation
12290 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12291 int32_t* use_saturation =
12292 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
Thierry Strudel3d639192016-09-09 11:52:26 -070012298 // EV step
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12300 gCamCapability[mCameraId]->exp_compensation_step)) {
12301 rc = BAD_VALUE;
12302 }
12303
12304 // CDS info
12305 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12306 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12307 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12308
12309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12310 CAM_INTF_META_CDS_DATA, *cdsData)) {
12311 rc = BAD_VALUE;
12312 }
12313 }
12314
Shuzhen Wang19463d72016-03-08 11:09:52 -080012315 // Hybrid AE
12316 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12317 uint8_t *hybrid_ae = (uint8_t *)
12318 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12319
12320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12321 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12322 rc = BAD_VALUE;
12323 }
12324 }
12325
Thierry Strudel3d639192016-09-09 11:52:26 -070012326 return rc;
12327}
12328
12329/*===========================================================================
12330 * FUNCTION : captureResultCb
12331 *
12332 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12333 *
12334 * PARAMETERS :
12335 * @frame : frame information from mm-camera-interface
12336 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12337 * @userdata: userdata
12338 *
12339 * RETURN : NONE
12340 *==========================================================================*/
12341void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12342 camera3_stream_buffer_t *buffer,
12343 uint32_t frame_number, bool isInputBuffer, void *userdata)
12344{
12345 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12346 if (hw == NULL) {
12347 LOGE("Invalid hw %p", hw);
12348 return;
12349 }
12350
12351 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12352 return;
12353}
12354
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012355/*===========================================================================
12356 * FUNCTION : setBufferErrorStatus
12357 *
12358 * DESCRIPTION: Callback handler for channels to report any buffer errors
12359 *
12360 * PARAMETERS :
12361 * @ch : Channel on which buffer error is reported from
12362 * @frame_number : frame number on which buffer error is reported on
12363 * @buffer_status : buffer error status
12364 * @userdata: userdata
12365 *
12366 * RETURN : NONE
12367 *==========================================================================*/
12368void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12369 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12370{
12371 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12372 if (hw == NULL) {
12373 LOGE("Invalid hw %p", hw);
12374 return;
12375 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012376
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012377 hw->setBufferErrorStatus(ch, frame_number, err);
12378 return;
12379}
12380
12381void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12382 uint32_t frameNumber, camera3_buffer_status_t err)
12383{
12384 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12385 pthread_mutex_lock(&mMutex);
12386
12387 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12388 if (req.frame_number != frameNumber)
12389 continue;
12390 for (auto& k : req.mPendingBufferList) {
12391 if(k.stream->priv == ch) {
12392 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12393 }
12394 }
12395 }
12396
12397 pthread_mutex_unlock(&mMutex);
12398 return;
12399}
Thierry Strudel3d639192016-09-09 11:52:26 -070012400/*===========================================================================
12401 * FUNCTION : initialize
12402 *
12403 * DESCRIPTION: Pass framework callback pointers to HAL
12404 *
12405 * PARAMETERS :
12406 *
12407 *
12408 * RETURN : Success : 0
12409 * Failure: -ENODEV
12410 *==========================================================================*/
12411
12412int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12413 const camera3_callback_ops_t *callback_ops)
12414{
12415 LOGD("E");
12416 QCamera3HardwareInterface *hw =
12417 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12418 if (!hw) {
12419 LOGE("NULL camera device");
12420 return -ENODEV;
12421 }
12422
12423 int rc = hw->initialize(callback_ops);
12424 LOGD("X");
12425 return rc;
12426}
12427
12428/*===========================================================================
12429 * FUNCTION : configure_streams
12430 *
12431 * DESCRIPTION: Configure streams as requested by the camera framework
12432 *
12433 * PARAMETERS :
12434 *
12435 *
12436 * RETURN : Success: 0
12437 * Failure: -EINVAL (if stream configuration is invalid)
12438 * -ENODEV (fatal error)
12439 *==========================================================================*/
12440
12441int QCamera3HardwareInterface::configure_streams(
12442 const struct camera3_device *device,
12443 camera3_stream_configuration_t *stream_list)
12444{
12445 LOGD("E");
12446 QCamera3HardwareInterface *hw =
12447 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12448 if (!hw) {
12449 LOGE("NULL camera device");
12450 return -ENODEV;
12451 }
12452 int rc = hw->configureStreams(stream_list);
12453 LOGD("X");
12454 return rc;
12455}
12456
12457/*===========================================================================
12458 * FUNCTION : construct_default_request_settings
12459 *
12460 * DESCRIPTION: Configure a settings buffer to meet the required use case
12461 *
12462 * PARAMETERS :
12463 *
12464 *
12465 * RETURN : Success: Return valid metadata
12466 * Failure: Return NULL
12467 *==========================================================================*/
12468const camera_metadata_t* QCamera3HardwareInterface::
12469 construct_default_request_settings(const struct camera3_device *device,
12470 int type)
12471{
12472
12473 LOGD("E");
12474 camera_metadata_t* fwk_metadata = NULL;
12475 QCamera3HardwareInterface *hw =
12476 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12477 if (!hw) {
12478 LOGE("NULL camera device");
12479 return NULL;
12480 }
12481
12482 fwk_metadata = hw->translateCapabilityToMetadata(type);
12483
12484 LOGD("X");
12485 return fwk_metadata;
12486}
12487
12488/*===========================================================================
12489 * FUNCTION : process_capture_request
12490 *
12491 * DESCRIPTION: Process a capture request submitted by the framework
12492 *
12493 * PARAMETERS :
12494 *
12495 *
12496 * RETURN :
12497 *==========================================================================*/
12498int QCamera3HardwareInterface::process_capture_request(
12499 const struct camera3_device *device,
12500 camera3_capture_request_t *request)
12501{
12502 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012503 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012504 QCamera3HardwareInterface *hw =
12505 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12506 if (!hw) {
12507 LOGE("NULL camera device");
12508 return -EINVAL;
12509 }
12510
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012511 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012512 LOGD("X");
12513 return rc;
12514}
12515
12516/*===========================================================================
12517 * FUNCTION : dump
12518 *
12519 * DESCRIPTION: Dump HAL debug state to the given file descriptor
12520 *
12521 * PARAMETERS :
12522 *
12523 *
12524 * RETURN :
12525 *==========================================================================*/
12526
12527void QCamera3HardwareInterface::dump(
12528 const struct camera3_device *device, int fd)
12529{
12530 /* Log level property is read when "adb shell dumpsys media.camera" is
12531 called so that the log level can be controlled without restarting
12532 the media server */
12533 getLogLevel();
12534
12535 LOGD("E");
12536 QCamera3HardwareInterface *hw =
12537 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12538 if (!hw) {
12539 LOGE("NULL camera device");
12540 return;
12541 }
12542
12543 hw->dump(fd);
12544 LOGD("X");
12545 return;
12546}
12547
12548/*===========================================================================
12549 * FUNCTION : flush
12550 *
12551 * DESCRIPTION: Flush all in-flight requests; only processed when the HAL is in the STARTED state
12552 *
12553 * PARAMETERS :
12554 *
12555 *
12556 * RETURN :
12557 *==========================================================================*/
12558
12559int QCamera3HardwareInterface::flush(
12560 const struct camera3_device *device)
12561{
12562 int rc;
12563 LOGD("E");
12564 QCamera3HardwareInterface *hw =
12565 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12566 if (!hw) {
12567 LOGE("NULL camera device");
12568 return -EINVAL;
12569 }
12570
12571 pthread_mutex_lock(&hw->mMutex);
12572 // Validate current state
12573 switch (hw->mState) {
12574 case STARTED:
12575 /* valid state */
12576 break;
12577
12578 case ERROR:
12579 pthread_mutex_unlock(&hw->mMutex);
12580 hw->handleCameraDeviceError();
12581 return -ENODEV;
12582
12583 default:
12584 LOGI("Flush returned during state %d", hw->mState);
12585 pthread_mutex_unlock(&hw->mMutex);
12586 return 0;
12587 }
12588 pthread_mutex_unlock(&hw->mMutex);
12589
12590 rc = hw->flush(true /* restart channels */ );
12591 LOGD("X");
12592 return rc;
12593}
12594
12595/*===========================================================================
12596 * FUNCTION : close_camera_device
12597 *
12598 * DESCRIPTION: Close the camera device and destroy the HAL instance
12599 *
12600 * PARAMETERS :
12601 *
12602 *
12603 * RETURN :
12604 *==========================================================================*/
12605int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12606{
12607 int ret = NO_ERROR;
12608 QCamera3HardwareInterface *hw =
12609 reinterpret_cast<QCamera3HardwareInterface *>(
12610 reinterpret_cast<camera3_device_t *>(device)->priv);
12611 if (!hw) {
12612 LOGE("NULL camera device");
12613 return BAD_VALUE;
12614 }
12615
12616 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12617 delete hw;
12618 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012619 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012620 return ret;
12621}
12622
12623/*===========================================================================
12624 * FUNCTION : getWaveletDenoiseProcessPlate
12625 *
12626 * DESCRIPTION: query wavelet denoise process plate
12627 *
12628 * PARAMETERS : None
12629 *
12630 * RETURN : WNR process plate value
12631 *==========================================================================*/
12632cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12633{
12634 char prop[PROPERTY_VALUE_MAX];
12635 memset(prop, 0, sizeof(prop));
12636 property_get("persist.denoise.process.plates", prop, "0");
12637 int processPlate = atoi(prop);
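    // For example, "adb shell setprop persist.denoise.process.plates 1"
    // selects CbCr-only processing; unrecognized values fall back to the
    // streamlined YCbCr plate.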
12638 switch(processPlate) {
12639 case 0:
12640 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12641 case 1:
12642 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12643 case 2:
12644 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12645 case 3:
12646 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12647 default:
12648 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12649 }
12650}
12651
12652
12653/*===========================================================================
12654 * FUNCTION : getTemporalDenoiseProcessPlate
12655 *
12656 * DESCRIPTION: query temporal denoise process plate
12657 *
12658 * PARAMETERS : None
12659 *
12660 * RETURN : TNR process plate value
12661 *==========================================================================*/
12662cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12663{
12664 char prop[PROPERTY_VALUE_MAX];
12665 memset(prop, 0, sizeof(prop));
12666 property_get("persist.tnr.process.plates", prop, "0");
12667 int processPlate = atoi(prop);
12668 switch(processPlate) {
12669 case 0:
12670 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12671 case 1:
12672 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12673 case 2:
12674 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12675 case 3:
12676 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12677 default:
12678 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12679 }
12680}
12681
12682
12683/*===========================================================================
12684 * FUNCTION : extractSceneMode
12685 *
12686 * DESCRIPTION: Extract scene mode from frameworks set metadata
12687 *
12688 * PARAMETERS :
12689 * @frame_settings: CameraMetadata reference
12690 * @metaMode: ANDROID_CONTROL_MODE value
12691 * @hal_metadata: hal metadata structure
12692 *
12693 * RETURN : int32_t type of status, NO_ERROR on success
12694 *==========================================================================*/
12695int32_t QCamera3HardwareInterface::extractSceneMode(
12696 const CameraMetadata &frame_settings, uint8_t metaMode,
12697 metadata_buffer_t *hal_metadata)
12698{
12699 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012700 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12701
12702 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12703 LOGD("Ignoring control mode OFF_KEEP_STATE");
12704 return NO_ERROR;
12705 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012706
12707 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12708 camera_metadata_ro_entry entry =
12709 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12710 if (0 == entry.count)
12711 return rc;
12712
12713 uint8_t fwk_sceneMode = entry.data.u8[0];
12714
12715 int val = lookupHalName(SCENE_MODES_MAP,
12716 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12717 fwk_sceneMode);
12718 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012719 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012720 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012721 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012722 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012723
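    // An HDR scene mode is served either by sensor HDR (selected via
    // persist.camera.sensor.hdr in setSensorHDR()) or, when sensor HDR is
    // not enabled, by the multi-frame HAL bracketing configured below.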
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012724 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12725 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12726 }
12727
12728 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12729        if (sceneMode == CAM_SCENE_MODE_HDR) { // sceneMode is already in HAL enum space
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012730 cam_hdr_param_t hdr_params;
12731 hdr_params.hdr_enable = 1;
12732 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12733 hdr_params.hdr_need_1x = false;
12734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12735 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12736 rc = BAD_VALUE;
12737 }
12738 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012739
Thierry Strudel3d639192016-09-09 11:52:26 -070012740 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12741 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12742 rc = BAD_VALUE;
12743 }
12744 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012745
12746 if (mForceHdrSnapshot) {
12747 cam_hdr_param_t hdr_params;
12748 hdr_params.hdr_enable = 1;
12749 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12750 hdr_params.hdr_need_1x = false;
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12752 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
Thierry Strudel3d639192016-09-09 11:52:26 -070012757 return rc;
12758}
12759
12760/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012761 * FUNCTION : setVideoHdrMode
12762 *
12763 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12764 *
12765 * PARAMETERS :
12766 * @hal_metadata: hal metadata structure
12767 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
12768 *
12769 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
12770 *==========================================================================*/
12771int32_t QCamera3HardwareInterface::setVideoHdrMode(
12772 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12773{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012774 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12775 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12776 }
12777
12778 LOGE("Invalid Video HDR mode %d!", vhdr);
12779 return BAD_VALUE;
12780}
12781
12782/*===========================================================================
12783 * FUNCTION : setSensorHDR
12784 *
12785 * DESCRIPTION: Enable/disable sensor HDR.
12786 *
12787 * PARAMETERS :
12788 * @hal_metadata: hal metadata structure
12789 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when called for video HDR; m_bSensorHDREnabled is then left untouched
12790 *
12791 * RETURN : int32_t type of status, NO_ERROR on success
12792 *==========================================================================*/
12793int32_t QCamera3HardwareInterface::setSensorHDR(
12794 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12795{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012796 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012797 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12798
12799 if (enable) {
12800 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12801 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
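        // The property value is interpreted directly as a
        // cam_sensor_hdr_type_t; the IoT build defaults to "3" (staggered HDR).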
12802 #ifdef _LE_CAMERA_
12803 //Default to staggered HDR for IOT
12804 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12805 #else
12806 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12807 #endif
12808 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12809 }
12810
12811 bool isSupported = false;
12812 switch (sensor_hdr) {
12813 case CAM_SENSOR_HDR_IN_SENSOR:
12814 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12815 CAM_QCOM_FEATURE_SENSOR_HDR) {
12816 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012817 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012818 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012819 break;
12820 case CAM_SENSOR_HDR_ZIGZAG:
12821 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12822 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12823 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012824 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012825 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012826 break;
12827 case CAM_SENSOR_HDR_STAGGERED:
12828 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12829 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12830 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012831 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012832 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012833 break;
12834 case CAM_SENSOR_HDR_OFF:
12835 isSupported = true;
12836 LOGD("Turning off sensor HDR");
12837 break;
12838 default:
12839 LOGE("HDR mode %d not supported", sensor_hdr);
12840 rc = BAD_VALUE;
12841 break;
12842 }
12843
12844 if(isSupported) {
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12846 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12847 rc = BAD_VALUE;
12848 } else {
12849 if(!isVideoHdrEnable)
12850 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012851 }
12852 }
12853 return rc;
12854}
12855
12856/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012857 * FUNCTION : needRotationReprocess
12858 *
12859 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12860 *
12861 * PARAMETERS : none
12862 *
12863 * RETURN : true: needed
12864 * false: no need
12865 *==========================================================================*/
12866bool QCamera3HardwareInterface::needRotationReprocess()
12867{
12868 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12869        // pp has the capability to process rotation, so rotation is done in reprocess
12870 LOGH("need do reprocess for rotation");
12871 return true;
12872 }
12873
12874 return false;
12875}
12876
12877/*===========================================================================
12878 * FUNCTION : needReprocess
12879 *
12880 * DESCRIPTION: check whether reprocess is needed
12881 *
12882 * PARAMETERS : @postprocess_mask : post-processing feature mask of the stream
12883 *
12884 * RETURN : true: needed
12885 * false: no need
12886 *==========================================================================*/
12887bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12888{
12889 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12890 // TODO: add for ZSL HDR later
12891 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12892 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12893 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12894 return true;
12895 } else {
12896 LOGH("already post processed frame");
12897 return false;
12898 }
12899 }
12900 return needRotationReprocess();
12901}
12902
12903/*===========================================================================
12904 * FUNCTION : needJpegExifRotation
12905 *
12906 * DESCRIPTION: if rotation from jpeg is needed
12907 *
12908 * PARAMETERS : none
12909 *
12910 * RETURN : true: needed
12911 * false: no need
12912 *==========================================================================*/
12913bool QCamera3HardwareInterface::needJpegExifRotation()
12914{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012915 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012916 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12917 LOGD("Need use Jpeg EXIF Rotation");
12918 return true;
12919 }
12920 return false;
12921}
12922
12923/*===========================================================================
12924 * FUNCTION : addOfflineReprocChannel
12925 *
12926 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12927 * coming from input channel
12928 *
12929 * PARAMETERS :
12930 * @config : reprocess configuration
12931 * @inputChHandle : pointer to the input (source) channel
12932 *
12933 *
12934 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12935 *==========================================================================*/
12936QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12937 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12938{
12939 int32_t rc = NO_ERROR;
12940 QCamera3ReprocessChannel *pChannel = NULL;
12941
12942 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012943 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12944 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012945 if (NULL == pChannel) {
12946 LOGE("no mem for reprocess channel");
12947 return NULL;
12948 }
12949
12950 rc = pChannel->initialize(IS_TYPE_NONE);
12951 if (rc != NO_ERROR) {
12952 LOGE("init reprocess channel failed, ret = %d", rc);
12953 delete pChannel;
12954 return NULL;
12955 }
12956
12957 // pp feature config
12958 cam_pp_feature_config_t pp_config;
12959 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12960
12961 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12962 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12963 & CAM_QCOM_FEATURE_DSDN) {
12964        // Use CPP CDS in case h/w supports it.
12965 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12966 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12967 }
12968 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12969 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12970 }
12971
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012972 if (config.hdr_param.hdr_enable) {
12973 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12974 pp_config.hdr_param = config.hdr_param;
12975 }
12976
12977 if (mForceHdrSnapshot) {
12978 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12979 pp_config.hdr_param.hdr_enable = 1;
12980 pp_config.hdr_param.hdr_need_1x = 0;
12981 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12982 }
12983
Thierry Strudel3d639192016-09-09 11:52:26 -070012984 rc = pChannel->addReprocStreamsFromSource(pp_config,
12985 config,
12986 IS_TYPE_NONE,
12987 mMetadataChannel);
12988
12989 if (rc != NO_ERROR) {
12990 delete pChannel;
12991 return NULL;
12992 }
12993 return pChannel;
12994}
12995
12996/*===========================================================================
12997 * FUNCTION : getMobicatMask
12998 *
12999 * DESCRIPTION: returns mobicat mask
13000 *
13001 * PARAMETERS : none
13002 *
13003 * RETURN : mobicat mask
13004 *
13005 *==========================================================================*/
13006uint8_t QCamera3HardwareInterface::getMobicatMask()
13007{
13008 return m_MobicatMask;
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : setMobicat
13013 *
13014 * DESCRIPTION: set Mobicat on/off.
13015 *
13016 * PARAMETERS :
13017 * @params : none
13018 *
13019 * RETURN : int32_t type of status
13020 * NO_ERROR -- success
13021 * non-zero failure code
13022 *==========================================================================*/
13023int32_t QCamera3HardwareInterface::setMobicat()
13024{
13025 char value [PROPERTY_VALUE_MAX];
13026 property_get("persist.camera.mobicat", value, "0");
13027 int32_t ret = NO_ERROR;
13028 uint8_t enableMobi = (uint8_t)atoi(value);
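    // Any non-zero persist.camera.mobicat value triggers a chromatix reload
    // for all modules (VFE and PP) and is stored as the Mobicat mask.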
13029
13030 if (enableMobi) {
13031 tune_cmd_t tune_cmd;
13032 tune_cmd.type = SET_RELOAD_CHROMATIX;
13033 tune_cmd.module = MODULE_ALL;
13034 tune_cmd.value = TRUE;
13035 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13036 CAM_INTF_PARM_SET_VFE_COMMAND,
13037 tune_cmd);
13038
13039 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13040 CAM_INTF_PARM_SET_PP_COMMAND,
13041 tune_cmd);
13042 }
13043 m_MobicatMask = enableMobi;
13044
13045 return ret;
13046}
13047
13048/*===========================================================================
13049* FUNCTION : getLogLevel
13050*
13051* DESCRIPTION: Reads the log level property into a variable
13052*
13053* PARAMETERS :
13054* None
13055*
13056* RETURN :
13057* None
13058*==========================================================================*/
13059void QCamera3HardwareInterface::getLogLevel()
13060{
13061 char prop[PROPERTY_VALUE_MAX];
13062 uint32_t globalLogLevel = 0;
13063
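    // e.g. "adb shell setprop persist.camera.hal.debug 3" raises HAL log
    // verbosity at runtime; dump() re-reads these properties, so no media
    // server restart is needed.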
13064 property_get("persist.camera.hal.debug", prop, "0");
13065 int val = atoi(prop);
13066 if (0 <= val) {
13067 gCamHal3LogLevel = (uint32_t)val;
13068 }
13069
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013070 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013071 gKpiDebugLevel = atoi(prop);
13072
13073 property_get("persist.camera.global.debug", prop, "0");
13074 val = atoi(prop);
13075 if (0 <= val) {
13076 globalLogLevel = (uint32_t)val;
13077 }
13078
13079 /* Highest log level among hal.logs and global.logs is selected */
13080 if (gCamHal3LogLevel < globalLogLevel)
13081 gCamHal3LogLevel = globalLogLevel;
13082
13083 return;
13084}
13085
13086/*===========================================================================
13087 * FUNCTION : validateStreamRotations
13088 *
13089 * DESCRIPTION: Check if the rotations requested are supported
13090 *
13091 * PARAMETERS :
13092 * @stream_list : streams to be configured
13093 *
13094 * RETURN : NO_ERROR on success
13095 * -EINVAL on failure
13096 *
13097 *==========================================================================*/
13098int QCamera3HardwareInterface::validateStreamRotations(
13099 camera3_stream_configuration_t *streamList)
13100{
13101 int rc = NO_ERROR;
13102
13103 /*
13104 * Loop through all streams requested in configuration
13105 * Check if unsupported rotations have been requested on any of them
13106 */
13107 for (size_t j = 0; j < streamList->num_streams; j++){
13108 camera3_stream_t *newStream = streamList->streams[j];
13109
13110 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13111 bool isImplDef = (newStream->format ==
13112 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13113 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13114 isImplDef);
13115
13116 if (isRotated && (!isImplDef || isZsl)) {
13117            LOGE("Error: Unsupported rotation of %d requested for stream "
13118                    "type:%d and stream format:%d",
13119 newStream->rotation, newStream->stream_type,
13120 newStream->format);
13121 rc = -EINVAL;
13122 break;
13123 }
13124 }
13125
13126 return rc;
13127}
13128
13129/*===========================================================================
13130* FUNCTION : getFlashInfo
13131*
13132* DESCRIPTION: Retrieve information about whether the device has a flash.
13133*
13134* PARAMETERS :
13135* @cameraId : Camera id to query
13136* @hasFlash : Boolean indicating whether there is a flash device
13137* associated with given camera
13138* @flashNode : If a flash device exists, this will be its device node.
13139*
13140* RETURN :
13141* None
13142*==========================================================================*/
13143void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13144 bool& hasFlash,
13145 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13146{
13147 cam_capability_t* camCapability = gCamCapability[cameraId];
13148 if (NULL == camCapability) {
13149 hasFlash = false;
13150 flashNode[0] = '\0';
13151 } else {
13152 hasFlash = camCapability->flash_available;
13153 strlcpy(flashNode,
13154 (char*)camCapability->flash_dev_name,
13155 QCAMERA_MAX_FILEPATH_LENGTH);
13156 }
13157}
13158
13159/*===========================================================================
13160* FUNCTION : getEepromVersionInfo
13161*
13162* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13163*
13164* PARAMETERS : None
13165*
13166* RETURN : string describing EEPROM version
13167* "\0" if no such info available
13168*==========================================================================*/
13169const char *QCamera3HardwareInterface::getEepromVersionInfo()
13170{
13171 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13172}
13173
13174/*===========================================================================
13175* FUNCTION : getLdafCalib
13176*
13177* DESCRIPTION: Retrieve Laser AF calibration data
13178*
13179* PARAMETERS : None
13180*
13181* RETURN : Two uint32_t describing laser AF calibration data
13182* NULL if none is available.
13183*==========================================================================*/
13184const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13185{
13186 if (mLdafCalibExist) {
13187 return &mLdafCalib[0];
13188 } else {
13189 return NULL;
13190 }
13191}
13192
13193/*===========================================================================
13194 * FUNCTION : dynamicUpdateMetaStreamInfo
13195 *
13196 * DESCRIPTION: This function:
13197 * (1) stops all the channels
13198 * (2) returns error on pending requests and buffers
13199 * (3) sends metastream_info in setparams
13200 * (4) starts all channels
13201 * This is useful when sensor has to be restarted to apply any
13202 * settings such as frame rate from a different sensor mode
13203 *
13204 * PARAMETERS : None
13205 *
13206 * RETURN : NO_ERROR on success
13207 * Error codes on failure
13208 *
13209 *==========================================================================*/
13210int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13211{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013212 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013213 int rc = NO_ERROR;
13214
13215 LOGD("E");
13216
13217 rc = stopAllChannels();
13218 if (rc < 0) {
13219 LOGE("stopAllChannels failed");
13220 return rc;
13221 }
13222
13223 rc = notifyErrorForPendingRequests();
13224 if (rc < 0) {
13225 LOGE("notifyErrorForPendingRequests failed");
13226 return rc;
13227 }
13228
13229 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13230        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x, "
13231                "Format:%d",
13232 mStreamConfigInfo.type[i],
13233 mStreamConfigInfo.stream_sizes[i].width,
13234 mStreamConfigInfo.stream_sizes[i].height,
13235 mStreamConfigInfo.postprocess_mask[i],
13236 mStreamConfigInfo.format[i]);
13237 }
13238
13239 /* Send meta stream info once again so that ISP can start */
13240 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13241 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13242 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13243 mParameters);
13244 if (rc < 0) {
13245 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13246 }
13247
13248 rc = startAllChannels();
13249 if (rc < 0) {
13250 LOGE("startAllChannels failed");
13251 return rc;
13252 }
13253
13254 LOGD("X");
13255 return rc;
13256}
13257
13258/*===========================================================================
13259 * FUNCTION : stopAllChannels
13260 *
13261 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13262 *
13263 * PARAMETERS : None
13264 *
13265 * RETURN : NO_ERROR on success
13266 * Error codes on failure
13267 *
13268 *==========================================================================*/
13269int32_t QCamera3HardwareInterface::stopAllChannels()
13270{
13271 int32_t rc = NO_ERROR;
13272
13273 LOGD("Stopping all channels");
13274 // Stop the Streams/Channels
13275 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13276 it != mStreamInfo.end(); it++) {
13277 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13278 if (channel) {
13279 channel->stop();
13280 }
13281 (*it)->status = INVALID;
13282 }
13283
13284 if (mSupportChannel) {
13285 mSupportChannel->stop();
13286 }
13287 if (mAnalysisChannel) {
13288 mAnalysisChannel->stop();
13289 }
13290 if (mRawDumpChannel) {
13291 mRawDumpChannel->stop();
13292 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013293 if (mHdrPlusRawSrcChannel) {
13294 mHdrPlusRawSrcChannel->stop();
13295 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013296 if (mMetadataChannel) {
13297 /* If content of mStreamInfo is not 0, there is metadata stream */
13298 mMetadataChannel->stop();
13299 }
13300
13301 LOGD("All channels stopped");
13302 return rc;
13303}
13304
13305/*===========================================================================
13306 * FUNCTION : startAllChannels
13307 *
13308 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13309 *
13310 * PARAMETERS : None
13311 *
13312 * RETURN : NO_ERROR on success
13313 * Error codes on failure
13314 *
13315 *==========================================================================*/
13316int32_t QCamera3HardwareInterface::startAllChannels()
13317{
13318 int32_t rc = NO_ERROR;
13319
13320 LOGD("Start all channels ");
13321 // Start the Streams/Channels
13322 if (mMetadataChannel) {
13323 /* If content of mStreamInfo is not 0, there is metadata stream */
13324 rc = mMetadataChannel->start();
13325 if (rc < 0) {
13326 LOGE("META channel start failed");
13327 return rc;
13328 }
13329 }
13330 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13331 it != mStreamInfo.end(); it++) {
13332 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13333 if (channel) {
13334 rc = channel->start();
13335 if (rc < 0) {
13336 LOGE("channel start failed");
13337 return rc;
13338 }
13339 }
13340 }
13341 if (mAnalysisChannel) {
13342 mAnalysisChannel->start();
13343 }
13344 if (mSupportChannel) {
13345 rc = mSupportChannel->start();
13346 if (rc < 0) {
13347 LOGE("Support channel start failed");
13348 return rc;
13349 }
13350 }
13351 if (mRawDumpChannel) {
13352 rc = mRawDumpChannel->start();
13353 if (rc < 0) {
13354 LOGE("RAW dump channel start failed");
13355 return rc;
13356 }
13357 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013358 if (mHdrPlusRawSrcChannel) {
13359 rc = mHdrPlusRawSrcChannel->start();
13360 if (rc < 0) {
13361 LOGE("HDR+ RAW channel start failed");
13362 return rc;
13363 }
13364 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013365
13366 LOGD("All channels started");
13367 return rc;
13368}
13369
13370/*===========================================================================
13371 * FUNCTION : notifyErrorForPendingRequests
13372 *
13373 * DESCRIPTION: This function sends error for all the pending requests/buffers
13374 *
13375 * PARAMETERS : None
13376 *
13377 * RETURN : Error codes
13378 * NO_ERROR on success
13379 *
13380 *==========================================================================*/
13381int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13382{
13383 int32_t rc = NO_ERROR;
13384 unsigned int frameNum = 0;
13385 camera3_capture_result_t result;
13386 camera3_stream_buffer_t *pStream_Buf = NULL;
13387
13388 memset(&result, 0, sizeof(camera3_capture_result_t));
13389
13390 if (mPendingRequestsList.size() > 0) {
13391 pendingRequestIterator i = mPendingRequestsList.begin();
13392 frameNum = i->frame_number;
13393 } else {
13394 /* There might still be pending buffers even though there are
13395 no pending requests. Setting the frameNum to MAX so that
13396 all the buffers with smaller frame numbers are returned */
13397 frameNum = UINT_MAX;
13398 }
13399
13400 LOGH("Oldest frame num on mPendingRequestsList = %u",
13401 frameNum);
13402
Emilian Peev7650c122017-01-19 08:24:33 -080013403 notifyErrorFoPendingDepthData(mDepthChannel);
13404
Thierry Strudel3d639192016-09-09 11:52:26 -070013405 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13406 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13407
13408 if (req->frame_number < frameNum) {
13409 // Send Error notify to frameworks for each buffer for which
13410 // metadata buffer is already sent
13411 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13412 req->frame_number, req->mPendingBufferList.size());
13413
13414 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13415 if (NULL == pStream_Buf) {
13416 LOGE("No memory for pending buffers array");
13417 return NO_MEMORY;
13418 }
13419 memset(pStream_Buf, 0,
13420 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13421 result.result = NULL;
13422 result.frame_number = req->frame_number;
13423 result.num_output_buffers = req->mPendingBufferList.size();
13424 result.output_buffers = pStream_Buf;
13425
13426 size_t index = 0;
13427 for (auto info = req->mPendingBufferList.begin();
13428 info != req->mPendingBufferList.end(); ) {
13429
13430 camera3_notify_msg_t notify_msg;
13431 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13432 notify_msg.type = CAMERA3_MSG_ERROR;
13433 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13434 notify_msg.message.error.error_stream = info->stream;
13435 notify_msg.message.error.frame_number = req->frame_number;
13436 pStream_Buf[index].acquire_fence = -1;
13437 pStream_Buf[index].release_fence = -1;
13438 pStream_Buf[index].buffer = info->buffer;
13439 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13440 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013441 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013442 index++;
13443 // Remove buffer from list
13444 info = req->mPendingBufferList.erase(info);
13445 }
13446
13447 // Remove this request from Map
13448 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13449 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13450 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13451
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013452 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013453
13454 delete [] pStream_Buf;
13455 } else {
13456
13457 // Go through the pending requests info and send error request to framework
13458 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13459
13460 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13461
13462 // Send error notify to frameworks
13463 camera3_notify_msg_t notify_msg;
13464 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13465 notify_msg.type = CAMERA3_MSG_ERROR;
13466 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13467 notify_msg.message.error.error_stream = NULL;
13468 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013469 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013470
13471 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13472 if (NULL == pStream_Buf) {
13473 LOGE("No memory for pending buffers array");
13474 return NO_MEMORY;
13475 }
13476 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13477
13478 result.result = NULL;
13479 result.frame_number = req->frame_number;
13480 result.input_buffer = i->input_buffer;
13481 result.num_output_buffers = req->mPendingBufferList.size();
13482 result.output_buffers = pStream_Buf;
13483
13484 size_t index = 0;
13485 for (auto info = req->mPendingBufferList.begin();
13486 info != req->mPendingBufferList.end(); ) {
13487 pStream_Buf[index].acquire_fence = -1;
13488 pStream_Buf[index].release_fence = -1;
13489 pStream_Buf[index].buffer = info->buffer;
13490 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13491 pStream_Buf[index].stream = info->stream;
13492 index++;
13493 // Remove buffer from list
13494 info = req->mPendingBufferList.erase(info);
13495 }
13496
13497 // Remove this request from Map
13498             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13499                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13500 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13501
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013502 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013503 delete [] pStream_Buf;
13504 i = erasePendingRequest(i);
13505 }
13506 }
13507
13508 /* Reset pending frame Drop list and requests list */
13509 mPendingFrameDropList.clear();
13510
13511 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13512 req.mPendingBufferList.clear();
13513 }
13514 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013515 LOGH("Cleared all the pending buffers ");
13516
13517 return rc;
13518}
13519
13520bool QCamera3HardwareInterface::isOnEncoder(
13521 const cam_dimension_t max_viewfinder_size,
13522 uint32_t width, uint32_t height)
13523{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013524 return ((width > (uint32_t)max_viewfinder_size.width) ||
13525 (height > (uint32_t)max_viewfinder_size.height) ||
13526 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13527 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013528}
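// Illustrative example (values assumed, not taken from a real capability table): with
// max_viewfinder_size = 1920x1080, isOnEncoder(max_viewfinder_size, 3840, 2160) returns
// true because the stream exceeds the viewfinder size, while
// isOnEncoder(max_viewfinder_size, 1280, 720) returns false.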
13529
13530/*===========================================================================
13531 * FUNCTION : setBundleInfo
13532 *
13533  * DESCRIPTION: Set bundle info for all streams that are bundled.
13534 *
13535 * PARAMETERS : None
13536 *
13537 * RETURN : NO_ERROR on success
13538 * Error codes on failure
13539 *==========================================================================*/
13540int32_t QCamera3HardwareInterface::setBundleInfo()
13541{
13542 int32_t rc = NO_ERROR;
13543
13544 if (mChannelHandle) {
13545 cam_bundle_config_t bundleInfo;
13546 memset(&bundleInfo, 0, sizeof(bundleInfo));
13547 rc = mCameraHandle->ops->get_bundle_info(
13548 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13549 if (rc != NO_ERROR) {
13550 LOGE("get_bundle_info failed");
13551 return rc;
13552 }
13553 if (mAnalysisChannel) {
13554 mAnalysisChannel->setBundleInfo(bundleInfo);
13555 }
13556 if (mSupportChannel) {
13557 mSupportChannel->setBundleInfo(bundleInfo);
13558 }
13559 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13560 it != mStreamInfo.end(); it++) {
13561 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13562 channel->setBundleInfo(bundleInfo);
13563 }
13564 if (mRawDumpChannel) {
13565 mRawDumpChannel->setBundleInfo(bundleInfo);
13566 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013567 if (mHdrPlusRawSrcChannel) {
13568 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13569 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013570 }
13571
13572 return rc;
13573}
13574
13575/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013576 * FUNCTION : setInstantAEC
13577 *
13578 * DESCRIPTION: Set Instant AEC related params.
13579 *
13580 * PARAMETERS :
13581 * @meta: CameraMetadata reference
13582 *
13583 * RETURN : NO_ERROR on success
13584 * Error codes on failure
13585 *==========================================================================*/
13586int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13587{
13588 int32_t rc = NO_ERROR;
13589 uint8_t val = 0;
13590 char prop[PROPERTY_VALUE_MAX];
13591
13592 // First try to configure instant AEC from framework metadata
13593 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13594 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13595 }
13596
13597 // If framework did not set this value, try to read from set prop.
13598 if (val == 0) {
13599 memset(prop, 0, sizeof(prop));
13600 property_get("persist.camera.instant.aec", prop, "0");
13601 val = (uint8_t)atoi(prop);
13602 }
13603
13604 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13605 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13606 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13607 mInstantAEC = val;
13608 mInstantAECSettledFrameNumber = 0;
13609 mInstantAecFrameIdxCount = 0;
13610 LOGH("instantAEC value set %d",val);
13611 if (mInstantAEC) {
13612 memset(prop, 0, sizeof(prop));
13613 property_get("persist.camera.ae.instant.bound", prop, "10");
13614 int32_t aec_frame_skip_cnt = atoi(prop);
13615 if (aec_frame_skip_cnt >= 0) {
13616 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13617 } else {
13618 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13619 rc = BAD_VALUE;
13620 }
13621 }
13622 } else {
13623 LOGE("Bad instant aec value set %d", val);
13624 rc = BAD_VALUE;
13625 }
13626 return rc;
13627}
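// Usage sketch (illustrative, the exact value mapping to cam_aec_convergence_type is an
// assumption): instant AEC can also be enabled without framework metadata by setting the
// property read above, e.g.
//     adb shell setprop persist.camera.instant.aec 1
// The resulting value must fall in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX)
// to be accepted; persist.camera.ae.instant.bound bounds how many display frames are
// skipped while AEC settles.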
13628
13629/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013630 * FUNCTION : get_num_overall_buffers
13631 *
13632  * DESCRIPTION: Count the total number of pending buffers across all requests.
13633 *
13634 * PARAMETERS : None
13635 *
13636 * RETURN : Number of overall pending buffers
13637 *
13638 *==========================================================================*/
13639uint32_t PendingBuffersMap::get_num_overall_buffers()
13640{
13641 uint32_t sum_buffers = 0;
13642 for (auto &req : mPendingBuffersInRequest) {
13643 sum_buffers += req.mPendingBufferList.size();
13644 }
13645 return sum_buffers;
13646}
13647
13648/*===========================================================================
13649 * FUNCTION : removeBuf
13650 *
13651  * DESCRIPTION: Remove a matching buffer from the tracker.
13652 *
13653 * PARAMETERS : @buffer: image buffer for the callback
13654 *
13655 * RETURN : None
13656 *
13657 *==========================================================================*/
13658void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13659{
13660 bool buffer_found = false;
13661 for (auto req = mPendingBuffersInRequest.begin();
13662 req != mPendingBuffersInRequest.end(); req++) {
13663 for (auto k = req->mPendingBufferList.begin();
13664 k != req->mPendingBufferList.end(); k++ ) {
13665 if (k->buffer == buffer) {
13666                 LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
13667 req->frame_number, buffer);
13668 k = req->mPendingBufferList.erase(k);
13669 if (req->mPendingBufferList.empty()) {
13670 // Remove this request from Map
13671 req = mPendingBuffersInRequest.erase(req);
13672 }
13673 buffer_found = true;
13674 break;
13675 }
13676 }
13677 if (buffer_found) {
13678 break;
13679 }
13680 }
13681 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13682 get_num_overall_buffers());
13683}
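// Hypothetical call-site sketch (not from the original code): when a buffer is returned
// outside the normal result path, its tracker entry can be dropped with
//     mPendingBuffersMap.removeBuf(output_buffer->buffer);
// where output_buffer is a camera3_stream_buffer_t*; the owning request entry is erased
// once its buffer list becomes empty, as implemented above.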
13684
13685/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013686 * FUNCTION : getBufErrStatus
13687 *
13688  * DESCRIPTION: Get buffer error status
13689 *
13690 * PARAMETERS : @buffer: buffer handle
13691 *
13692 * RETURN : Error status
13693 *
13694 *==========================================================================*/
13695int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13696{
13697 for (auto& req : mPendingBuffersInRequest) {
13698 for (auto& k : req.mPendingBufferList) {
13699 if (k.buffer == buffer)
13700 return k.bufStatus;
13701 }
13702 }
13703 return CAMERA3_BUFFER_STATUS_OK;
13704}
13705
13706/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013707 * FUNCTION : setPAAFSupport
13708 *
13709 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13710 * feature mask according to stream type and filter
13711 * arrangement
13712 *
13713 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13714 * @stream_type: stream type
13715 * @filter_arrangement: filter arrangement
13716 *
13717 * RETURN : None
13718 *==========================================================================*/
13719void QCamera3HardwareInterface::setPAAFSupport(
13720 cam_feature_mask_t& feature_mask,
13721 cam_stream_type_t stream_type,
13722 cam_color_filter_arrangement_t filter_arrangement)
13723{
Thierry Strudel3d639192016-09-09 11:52:26 -070013724 switch (filter_arrangement) {
13725 case CAM_FILTER_ARRANGEMENT_RGGB:
13726 case CAM_FILTER_ARRANGEMENT_GRBG:
13727 case CAM_FILTER_ARRANGEMENT_GBRG:
13728 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013729 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13730 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013731 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013732 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13733 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070013734 }
13735 break;
13736 case CAM_FILTER_ARRANGEMENT_Y:
13737 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13738 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13739 }
13740 break;
13741 default:
13742 break;
13743 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070013744 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13745 feature_mask, stream_type, filter_arrangement);
13746
13747
Thierry Strudel3d639192016-09-09 11:52:26 -070013748}
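// Usage sketch (illustrative values, not a real call site):
//     cam_feature_mask_t mask = 0;
//     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
//     // mask now contains CAM_QCOM_FEATURE_PAAF, unless CAM_QTI_FEATURE_PPEISCORE was
//     // already set, in which case PAAF is intentionally left out (see above).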
13749
13750/*===========================================================================
13751* FUNCTION : getSensorMountAngle
13752*
13753* DESCRIPTION: Retrieve sensor mount angle
13754*
13755* PARAMETERS : None
13756*
13757* RETURN : sensor mount angle in uint32_t
13758*==========================================================================*/
13759uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13760{
13761 return gCamCapability[mCameraId]->sensor_mount_angle;
13762}
13763
13764/*===========================================================================
13765* FUNCTION : getRelatedCalibrationData
13766*
13767* DESCRIPTION: Retrieve related system calibration data
13768*
13769* PARAMETERS : None
13770*
13771* RETURN     : Pointer to related system calibration data
13772*==========================================================================*/
13773const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13774{
13775 return (const cam_related_system_calibration_data_t *)
13776 &(gCamCapability[mCameraId]->related_cam_calibration);
13777}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013778
13779/*===========================================================================
13780 * FUNCTION : is60HzZone
13781 *
13782  * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
13783 *
13784 * PARAMETERS : None
13785 *
13786 * RETURN : True if in 60Hz zone, False otherwise
13787 *==========================================================================*/
13788bool QCamera3HardwareInterface::is60HzZone()
13789{
13790 time_t t = time(NULL);
13791 struct tm lt;
13792
13793 struct tm* r = localtime_r(&t, &lt);
13794
13795 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13796 return true;
13797 else
13798 return false;
13799}
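// The heuristic above treats UTC offsets at or below -2h (roughly the Americas) and at or
// above +8h (roughly East Asia and Australia) as 60Hz regions and assumes 50Hz everywhere
// else; if local time cannot be determined, it defaults to 60Hz.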
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013800
13801/*===========================================================================
13802 * FUNCTION : adjustBlackLevelForCFA
13803 *
13804 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13805  *              of the Bayer CFA (Color Filter Array).
13806 *
13807 * PARAMETERS : @input: black level pattern in the order of RGGB
13808 * @output: black level pattern in the order of CFA
13809 * @color_arrangement: CFA color arrangement
13810 *
13811 * RETURN : None
13812 *==========================================================================*/
13813template<typename T>
13814void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13815 T input[BLACK_LEVEL_PATTERN_CNT],
13816 T output[BLACK_LEVEL_PATTERN_CNT],
13817 cam_color_filter_arrangement_t color_arrangement)
13818{
13819 switch (color_arrangement) {
13820 case CAM_FILTER_ARRANGEMENT_GRBG:
13821 output[0] = input[1];
13822 output[1] = input[0];
13823 output[2] = input[3];
13824 output[3] = input[2];
13825 break;
13826 case CAM_FILTER_ARRANGEMENT_GBRG:
13827 output[0] = input[2];
13828 output[1] = input[3];
13829 output[2] = input[0];
13830 output[3] = input[1];
13831 break;
13832 case CAM_FILTER_ARRANGEMENT_BGGR:
13833 output[0] = input[3];
13834 output[1] = input[2];
13835 output[2] = input[1];
13836 output[3] = input[0];
13837 break;
13838 case CAM_FILTER_ARRANGEMENT_RGGB:
13839 output[0] = input[0];
13840 output[1] = input[1];
13841 output[2] = input[2];
13842 output[3] = input[3];
13843 break;
13844 default:
13845 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13846 break;
13847 }
13848}
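// Worked example (illustrative numbers): with input = {R, Gr, Gb, B} = {64, 65, 66, 67}
// and color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the output becomes
// {Gr, R, B, Gb} = {65, 64, 67, 66}, i.e. the RGGB-ordered pattern is re-ordered to match
// the sensor's CFA layout.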
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013849
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013850void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13851 CameraMetadata &resultMetadata,
13852 std::shared_ptr<metadata_buffer_t> settings)
13853{
13854 if (settings == nullptr) {
13855 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13856 return;
13857 }
13858
13859 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13860 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13861 }
13862
13863 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13864 String8 str((const char *)gps_methods);
13865 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13866 }
13867
13868 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13869 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13870 }
13871
13872 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13873 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13874 }
13875
13876 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13877 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13878 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13879 }
13880
13881 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13882 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13883 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13884 }
13885
13886 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13887 int32_t fwk_thumb_size[2];
13888 fwk_thumb_size[0] = thumb_size->width;
13889 fwk_thumb_size[1] = thumb_size->height;
13890 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13891 }
13892
13893 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13894 uint8_t fwk_intent = intent[0];
13895 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13896 }
13897}
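// Note: only request-scoped tags that cannot be taken from the ZSL buffer's own metadata
// (JPEG/GPS/thumbnail settings and the capture intent) are copied from the original HDR+
// request settings above; all other result tags come from the ZSL capture itself.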
13898
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013899bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13900 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13901{
13902 if (hdrPlusRequest == nullptr) return false;
13903
13904 // Check noise reduction mode is high quality.
13905 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13906 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13907 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013908 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13909 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013910 return false;
13911 }
13912
13913 // Check edge mode is high quality.
13914 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13915 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13916 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13917 return false;
13918 }
13919
13920 if (request.num_output_buffers != 1 ||
13921 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13922 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013923 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13924 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13925                     request.output_buffers[i].stream->width,
13926                     request.output_buffers[i].stream->height,
13927                     request.output_buffers[i].stream->format);
13928 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013929 return false;
13930 }
13931
13932 // Get a YUV buffer from pic channel.
13933 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13934 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13935 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13936 if (res != OK) {
13937 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13938 __FUNCTION__, strerror(-res), res);
13939 return false;
13940 }
13941
13942 pbcamera::StreamBuffer buffer;
13943 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080013944 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013945 buffer.data = yuvBuffer->buffer;
13946 buffer.dataSize = yuvBuffer->frame_len;
13947
13948 pbcamera::CaptureRequest pbRequest;
13949 pbRequest.id = request.frame_number;
13950 pbRequest.outputBuffers.push_back(buffer);
13951
13952 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013953 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013954 if (res != OK) {
13955 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13956 strerror(-res), res);
13957 return false;
13958 }
13959
13960 hdrPlusRequest->yuvBuffer = yuvBuffer;
13961 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13962
13963 return true;
13964}
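// Sketch of the request shape this path expects (illustrative, not taken from a real
// caller): the capture request must carry exactly one BLOB output buffer and
// high-quality post-processing hints, e.g.
//     uint8_t hq = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
//     metadata.update(ANDROID_NOISE_REDUCTION_MODE, &hq, 1);
//     uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
//     metadata.update(ANDROID_EDGE_MODE, &edge, 1);
// Anything else falls back to the regular (non-HDR+) capture path.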
13965
Chien-Yu Chenee335912017-02-09 17:53:20 -080013966status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13967{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013968 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013969 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13970 return -ENODEV;
13971 }
13972
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070013973 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080013974
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070013975 // Connect to HDR+ service if it's not connected yet.
13976 pthread_mutex_lock(&gCamLock);
13977 if (!gEaselConnected) {
13978 // Connect to HDR+ service
13979 res = gHdrPlusClient->connect(this);
13980 if (res != OK) {
13981 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
13982 strerror(-res), res);
13983 pthread_mutex_unlock(&gCamLock);
13984 return res;
13985 }
13986
13987 // Set static metadata.
13988 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
13989 if (res != OK) {
13990             LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
13991 strerror(-res), res);
13992 gHdrPlusClient->disconnect();
13993 pthread_mutex_unlock(&gCamLock);
13994 return res;
13995 }
13996 gEaselConnected = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080013997 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070013998 pthread_mutex_unlock(&gCamLock);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013999
14000 // Configure stream for HDR+.
14001 res = configureHdrPlusStreamsLocked();
14002 if (res != OK) {
14003 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014004 return res;
14005 }
14006
14007 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14008 res = gHdrPlusClient->setZslHdrPlusMode(true);
14009 if (res != OK) {
14010 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014011 return res;
14012 }
14013
14014 mHdrPlusModeEnabled = true;
14015 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14016
14017 return OK;
14018}
14019
14020void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14021{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014022 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014023 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014024 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14025 if (res != OK) {
14026 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14027 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014028 }
14029
14030 mHdrPlusModeEnabled = false;
14031 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14032}
14033
14034status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014035{
14036 pbcamera::InputConfiguration inputConfig;
14037 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14038 status_t res = OK;
14039
14040 // Configure HDR+ client streams.
14041 // Get input config.
14042 if (mHdrPlusRawSrcChannel) {
14043 // HDR+ input buffers will be provided by HAL.
14044 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14045 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14046 if (res != OK) {
14047             LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14048 __FUNCTION__, strerror(-res), res);
14049 return res;
14050 }
14051
14052 inputConfig.isSensorInput = false;
14053 } else {
14054 // Sensor MIPI will send data to Easel.
14055 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014056 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014057 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14058 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14059 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14060 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14061 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14062 if (mSensorModeInfo.num_raw_bits != 10) {
14063 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14064 mSensorModeInfo.num_raw_bits);
14065 return BAD_VALUE;
14066 }
14067
14068 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014069 }
14070
14071 // Get output configurations.
14072 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014073 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014074
14075 // Easel may need to output YUV output buffers if mPictureChannel was created.
14076 pbcamera::StreamConfiguration yuvOutputConfig;
14077 if (mPictureChannel != nullptr) {
14078 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14079 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14080 if (res != OK) {
14081             LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14082 __FUNCTION__, strerror(-res), res);
14083
14084 return res;
14085 }
14086
14087 outputStreamConfigs.push_back(yuvOutputConfig);
14088 }
14089
14090 // TODO: consider other channels for YUV output buffers.
14091
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014092 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014093 if (res != OK) {
14094         LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14095 strerror(-res), res);
14096 return res;
14097 }
14098
14099 return OK;
14100}
14101
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014102void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14103 const camera_metadata_t &resultMetadata) {
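    // Flow summary: validate the single YUV output buffer, merge the original HDR+ request
    // settings into the ZSL result metadata, optionally dump the YUV output for debugging,
    // hand the buffer back to the pic channel for JPEG encoding, report the metadata to the
    // framework, and finally drop the pending HDR+ request entry.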
14104 if (result != nullptr) {
14105 if (result->outputBuffers.size() != 1) {
14106             ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14107 result->outputBuffers.size());
14108 return;
14109 }
14110
14111 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14112 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14113 result->outputBuffers[0].streamId);
14114 return;
14115 }
14116
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014117 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014118 HdrPlusPendingRequest pendingRequest;
14119 {
14120 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14121             auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
14122             pendingRequest = req->second;
14123 }
14124
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014125 // Update the result metadata with the settings of the HDR+ still capture request because
14126 // the result metadata belongs to a ZSL buffer.
14127 CameraMetadata metadata;
14128 metadata = &resultMetadata;
14129 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14130 camera_metadata_t* updatedResultMetadata = metadata.release();
14131
14132 QCamera3PicChannel *picChannel =
14133 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14134
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014135 // Check if dumping HDR+ YUV output is enabled.
14136 char prop[PROPERTY_VALUE_MAX];
14137 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14138 bool dumpYuvOutput = atoi(prop);
14139
14140 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014141 // Dump yuv buffer to a ppm file.
14142 pbcamera::StreamConfiguration outputConfig;
14143 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14144 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14145 if (rc == OK) {
14146 char buf[FILENAME_MAX] = {};
14147 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14148 result->requestId, result->outputBuffers[0].streamId,
14149 outputConfig.image.width, outputConfig.image.height);
14150
14151 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14152 } else {
14153 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14154 __FUNCTION__, strerror(-rc), rc);
14155 }
14156 }
14157
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014158 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14159 auto halMetadata = std::make_shared<metadata_buffer_t>();
14160 clear_metadata_buffer(halMetadata.get());
14161
14162 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14163 // encoding.
14164 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14165 halStreamId, /*minFrameDuration*/0);
14166 if (res == OK) {
14167 // Return the buffer to pic channel for encoding.
14168 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14169 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14170 halMetadata);
14171 } else {
14172 // Return the buffer without encoding.
14173 // TODO: This should not happen but we may want to report an error buffer to camera
14174 // service.
14175 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14176 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14177 strerror(-res), res);
14178 }
14179
14180 // Send HDR+ metadata to framework.
14181 {
14182 pthread_mutex_lock(&mMutex);
14183
14184 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14185 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14186 pthread_mutex_unlock(&mMutex);
14187 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014188
14189 // Remove the HDR+ pending request.
14190 {
14191 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14192 auto req = mHdrPlusPendingRequests.find(result->requestId);
14193 mHdrPlusPendingRequests.erase(req);
14194 }
14195 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014196}
14197
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014198void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14199 // TODO: Handle HDR+ capture failures and send the failure to framework.
14200 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14201     auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }
14202
14203 // Return the buffer to pic channel.
14204 QCamera3PicChannel *picChannel =
14205 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14206 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14207
14208 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014209}
14210
Thierry Strudel3d639192016-09-09 11:52:26 -070014211}; //end namespace qcamera