blob: 1a0ec8a3f3483bb7dd3e64c9b6efd2d2dca5b4d1 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
Jason Leeb9e76432017-03-10 17:14:19 -080084#define MAX_EIS_WIDTH 3840
85#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070086
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if FPS set is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Set a threshold for detection of missing buffers //seconds
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmarks indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
124
Thierry Strudel3d639192016-09-09 11:52:26 -0700125cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
126const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
127extern pthread_mutex_t gCamLock;
128volatile uint32_t gCamHal3LogLevel = 1;
129extern uint8_t gNumCameraSessions;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800130// HDR+ client instance. If null, Easel was not detected on this device.
131// Note that this doesn't support concurrent front and back camera b/35960155.
132std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
133// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
134bool gEaselBypassOnly;
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -0700135// If Easel is connected.
136bool gEaselConnected;
Thierry Strudel3d639192016-09-09 11:52:26 -0700137
138const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
139 {"On", CAM_CDS_MODE_ON},
140 {"Off", CAM_CDS_MODE_OFF},
141 {"Auto",CAM_CDS_MODE_AUTO}
142};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700143const QCamera3HardwareInterface::QCameraMap<
144 camera_metadata_enum_android_video_hdr_mode_t,
145 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
146 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
147 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
148};
149
Thierry Strudel54dc9782017-02-15 12:12:10 -0800150const QCamera3HardwareInterface::QCameraMap<
151 camera_metadata_enum_android_binning_correction_mode_t,
152 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
153 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
154 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
155};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700156
157const QCamera3HardwareInterface::QCameraMap<
158 camera_metadata_enum_android_ir_mode_t,
159 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
160 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
161 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
162 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
163};
Thierry Strudel3d639192016-09-09 11:52:26 -0700164
165const QCamera3HardwareInterface::QCameraMap<
166 camera_metadata_enum_android_control_effect_mode_t,
167 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
168 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
169 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
170 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
171 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
172 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
173 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
174 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
175 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
176 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
177};
178
179const QCamera3HardwareInterface::QCameraMap<
180 camera_metadata_enum_android_control_awb_mode_t,
181 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
182 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
183 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
184 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
185 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
186 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
187 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
188 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
189 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
190 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
191};
192
193const QCamera3HardwareInterface::QCameraMap<
194 camera_metadata_enum_android_control_scene_mode_t,
195 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
196 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
197 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
198 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
199 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
200 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
201 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
202 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
203 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
204 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
205 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
206 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
207 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
208 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
209 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
210 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800211 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
212 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700213};
214
215const QCamera3HardwareInterface::QCameraMap<
216 camera_metadata_enum_android_control_af_mode_t,
217 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
218 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
219 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
220 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
221 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
222 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
223 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
224 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
225};
226
227const QCamera3HardwareInterface::QCameraMap<
228 camera_metadata_enum_android_color_correction_aberration_mode_t,
229 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
230 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
231 CAM_COLOR_CORRECTION_ABERRATION_OFF },
232 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
233 CAM_COLOR_CORRECTION_ABERRATION_FAST },
234 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
235 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
236};
237
238const QCamera3HardwareInterface::QCameraMap<
239 camera_metadata_enum_android_control_ae_antibanding_mode_t,
240 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
241 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
242 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
243 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
244 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
245};
246
247const QCamera3HardwareInterface::QCameraMap<
248 camera_metadata_enum_android_control_ae_mode_t,
249 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
250 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
251 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
252 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
253 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
254 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
255};
256
257const QCamera3HardwareInterface::QCameraMap<
258 camera_metadata_enum_android_flash_mode_t,
259 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
260 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
261 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
262 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
263};
264
265const QCamera3HardwareInterface::QCameraMap<
266 camera_metadata_enum_android_statistics_face_detect_mode_t,
267 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
268 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
269 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
270 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
271};
272
273const QCamera3HardwareInterface::QCameraMap<
274 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
275 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
276 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
277 CAM_FOCUS_UNCALIBRATED },
278 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
279 CAM_FOCUS_APPROXIMATE },
280 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
281 CAM_FOCUS_CALIBRATED }
282};
283
284const QCamera3HardwareInterface::QCameraMap<
285 camera_metadata_enum_android_lens_state_t,
286 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
287 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
288 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
289};
290
291const int32_t available_thumbnail_sizes[] = {0, 0,
292 176, 144,
293 240, 144,
294 256, 144,
295 240, 160,
296 256, 154,
297 240, 240,
298 320, 240};
299
300const QCamera3HardwareInterface::QCameraMap<
301 camera_metadata_enum_android_sensor_test_pattern_mode_t,
302 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
303 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
304 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
305 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
306 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
307 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
308 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
309};
310
311/* Since there is no mapping for all the options some Android enum are not listed.
312 * Also, the order in this list is important because while mapping from HAL to Android it will
313 * traverse from lower to higher index which means that for HAL values that are map to different
314 * Android values, the traverse logic will select the first one found.
315 */
316const QCamera3HardwareInterface::QCameraMap<
317 camera_metadata_enum_android_sensor_reference_illuminant1_t,
318 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
319 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
320 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
321 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
322 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
323 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
324 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
325 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
326 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
327 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
328 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
329 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
330 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
331 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
332 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
333 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
334 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
335};
336
337const QCamera3HardwareInterface::QCameraMap<
338 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
339 { 60, CAM_HFR_MODE_60FPS},
340 { 90, CAM_HFR_MODE_90FPS},
341 { 120, CAM_HFR_MODE_120FPS},
342 { 150, CAM_HFR_MODE_150FPS},
343 { 180, CAM_HFR_MODE_180FPS},
344 { 210, CAM_HFR_MODE_210FPS},
345 { 240, CAM_HFR_MODE_240FPS},
346 { 480, CAM_HFR_MODE_480FPS},
347};
348
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700349const QCamera3HardwareInterface::QCameraMap<
350 qcamera3_ext_instant_aec_mode_t,
351 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
352 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
353 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
354 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
355};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800356
357const QCamera3HardwareInterface::QCameraMap<
358 qcamera3_ext_exposure_meter_mode_t,
359 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
360 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
361 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
362 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
363 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
364 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
365 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
366 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
367};
368
369const QCamera3HardwareInterface::QCameraMap<
370 qcamera3_ext_iso_mode_t,
371 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
372 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
373 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
374 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
375 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
376 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
377 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
378 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
379 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
380};
381
Thierry Strudel3d639192016-09-09 11:52:26 -0700382camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
383 .initialize = QCamera3HardwareInterface::initialize,
384 .configure_streams = QCamera3HardwareInterface::configure_streams,
385 .register_stream_buffers = NULL,
386 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
387 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
388 .get_metadata_vendor_tag_ops = NULL,
389 .dump = QCamera3HardwareInterface::dump,
390 .flush = QCamera3HardwareInterface::flush,
391 .reserved = {0},
392};
393
394// initialise to some default value
395uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
396
397/*===========================================================================
398 * FUNCTION : QCamera3HardwareInterface
399 *
400 * DESCRIPTION: constructor of QCamera3HardwareInterface
401 *
402 * PARAMETERS :
403 * @cameraId : camera ID
404 *
405 * RETURN : none
406 *==========================================================================*/
407QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
408 const camera_module_callbacks_t *callbacks)
409 : mCameraId(cameraId),
410 mCameraHandle(NULL),
411 mCameraInitialized(false),
412 mCallbackOps(NULL),
413 mMetadataChannel(NULL),
414 mPictureChannel(NULL),
415 mRawChannel(NULL),
416 mSupportChannel(NULL),
417 mAnalysisChannel(NULL),
418 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700419 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700420 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800421 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800422 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700423 mChannelHandle(0),
424 mFirstConfiguration(true),
425 mFlush(false),
426 mFlushPerf(false),
427 mParamHeap(NULL),
428 mParameters(NULL),
429 mPrevParameters(NULL),
430 m_bIsVideo(false),
431 m_bIs4KVideo(false),
432 m_bEisSupportedSize(false),
433 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800434 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700435 m_MobicatMask(0),
436 mMinProcessedFrameDuration(0),
437 mMinJpegFrameDuration(0),
438 mMinRawFrameDuration(0),
439 mMetaFrameCount(0U),
440 mUpdateDebugLevel(false),
441 mCallbacks(callbacks),
442 mCaptureIntent(0),
443 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700444 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800445 /* DevCamDebug metadata internal m control*/
446 mDevCamDebugMetaEnable(0),
447 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700448 mBatchSize(0),
449 mToBeQueuedVidBufs(0),
450 mHFRVideoFps(DEFAULT_VIDEO_FPS),
451 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800452 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800453 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700454 mFirstFrameNumberInBatch(0),
455 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800456 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700457 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
458 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700459 mInstantAEC(false),
460 mResetInstantAEC(false),
461 mInstantAECSettledFrameNumber(0),
462 mAecSkipDisplayFrameBound(0),
463 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800464 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700466 mLastCustIntentFrmNum(-1),
467 mState(CLOSED),
468 mIsDeviceLinked(false),
469 mIsMainCamera(true),
470 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700471 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800472 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800473 mHdrPlusModeEnabled(false),
474 mIsApInputUsedForHdrPlus(false),
475 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800476 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700477{
478 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700479 mCommon.init(gCamCapability[cameraId]);
480 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700481#ifndef USE_HAL_3_3
482 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
483#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700484 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700485#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700486 mCameraDevice.common.close = close_camera_device;
487 mCameraDevice.ops = &mCameraOps;
488 mCameraDevice.priv = this;
489 gCamCapability[cameraId]->version = CAM_HAL_V3;
490 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
491 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
492 gCamCapability[cameraId]->min_num_pp_bufs = 3;
493
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800494 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700495
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800496 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700497 mPendingLiveRequest = 0;
498 mCurrentRequestId = -1;
499 pthread_mutex_init(&mMutex, NULL);
500
501 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
502 mDefaultMetadata[i] = NULL;
503
504 // Getting system props of different kinds
505 char prop[PROPERTY_VALUE_MAX];
506 memset(prop, 0, sizeof(prop));
507 property_get("persist.camera.raw.dump", prop, "0");
508 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800509 property_get("persist.camera.hal3.force.hdr", prop, "0");
510 mForceHdrSnapshot = atoi(prop);
511
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 if (mEnableRawDump)
513 LOGD("Raw dump from Camera HAL enabled");
514
515 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
516 memset(mLdafCalib, 0, sizeof(mLdafCalib));
517
518 memset(prop, 0, sizeof(prop));
519 property_get("persist.camera.tnr.preview", prop, "0");
520 m_bTnrPreview = (uint8_t)atoi(prop);
521
522 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800523 property_get("persist.camera.swtnr.preview", prop, "1");
524 m_bSwTnrPreview = (uint8_t)atoi(prop);
525
526 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700527 property_get("persist.camera.tnr.video", prop, "0");
528 m_bTnrVideo = (uint8_t)atoi(prop);
529
530 memset(prop, 0, sizeof(prop));
531 property_get("persist.camera.avtimer.debug", prop, "0");
532 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800533 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700534
Thierry Strudel54dc9782017-02-15 12:12:10 -0800535 memset(prop, 0, sizeof(prop));
536 property_get("persist.camera.cacmode.disable", prop, "0");
537 m_cacModeDisabled = (uint8_t)atoi(prop);
538
Thierry Strudel3d639192016-09-09 11:52:26 -0700539 //Load and read GPU library.
540 lib_surface_utils = NULL;
541 LINK_get_surface_pixel_alignment = NULL;
542 mSurfaceStridePadding = CAM_PAD_TO_32;
543 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
544 if (lib_surface_utils) {
545 *(void **)&LINK_get_surface_pixel_alignment =
546 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
547 if (LINK_get_surface_pixel_alignment) {
548 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
549 }
550 dlclose(lib_surface_utils);
551 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700552
553 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700554}
555
556/*===========================================================================
557 * FUNCTION : ~QCamera3HardwareInterface
558 *
559 * DESCRIPTION: destructor of QCamera3HardwareInterface
560 *
561 * PARAMETERS : none
562 *
563 * RETURN : none
564 *==========================================================================*/
565QCamera3HardwareInterface::~QCamera3HardwareInterface()
566{
567 LOGD("E");
568
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800569 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700570
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800571 // Disable power hint and enable the perf lock for close camera
572 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
573 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
574
575 // unlink of dualcam during close camera
576 if (mIsDeviceLinked) {
577 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
578 &m_pDualCamCmdPtr->bundle_info;
579 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
580 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
581 pthread_mutex_lock(&gCamLock);
582
583 if (mIsMainCamera == 1) {
584 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
585 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
586 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
587 // related session id should be session id of linked session
588 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
589 } else {
590 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
591 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
592 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
593 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
594 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800595 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800596 pthread_mutex_unlock(&gCamLock);
597
598 rc = mCameraHandle->ops->set_dual_cam_cmd(
599 mCameraHandle->camera_handle);
600 if (rc < 0) {
601 LOGE("Dualcam: Unlink failed, but still proceed to close");
602 }
603 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700604
605 /* We need to stop all streams before deleting any stream */
606 if (mRawDumpChannel) {
607 mRawDumpChannel->stop();
608 }
609
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700610 if (mHdrPlusRawSrcChannel) {
611 mHdrPlusRawSrcChannel->stop();
612 }
613
Thierry Strudel3d639192016-09-09 11:52:26 -0700614 // NOTE: 'camera3_stream_t *' objects are already freed at
615 // this stage by the framework
616 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
617 it != mStreamInfo.end(); it++) {
618 QCamera3ProcessingChannel *channel = (*it)->channel;
619 if (channel) {
620 channel->stop();
621 }
622 }
623 if (mSupportChannel)
624 mSupportChannel->stop();
625
626 if (mAnalysisChannel) {
627 mAnalysisChannel->stop();
628 }
629 if (mMetadataChannel) {
630 mMetadataChannel->stop();
631 }
632 if (mChannelHandle) {
633 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
634 mChannelHandle);
635 LOGD("stopping channel %d", mChannelHandle);
636 }
637
638 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
639 it != mStreamInfo.end(); it++) {
640 QCamera3ProcessingChannel *channel = (*it)->channel;
641 if (channel)
642 delete channel;
643 free (*it);
644 }
645 if (mSupportChannel) {
646 delete mSupportChannel;
647 mSupportChannel = NULL;
648 }
649
650 if (mAnalysisChannel) {
651 delete mAnalysisChannel;
652 mAnalysisChannel = NULL;
653 }
654 if (mRawDumpChannel) {
655 delete mRawDumpChannel;
656 mRawDumpChannel = NULL;
657 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700658 if (mHdrPlusRawSrcChannel) {
659 delete mHdrPlusRawSrcChannel;
660 mHdrPlusRawSrcChannel = NULL;
661 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700662 if (mDummyBatchChannel) {
663 delete mDummyBatchChannel;
664 mDummyBatchChannel = NULL;
665 }
666
667 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800668 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700669
670 if (mMetadataChannel) {
671 delete mMetadataChannel;
672 mMetadataChannel = NULL;
673 }
674
675 /* Clean up all channels */
676 if (mCameraInitialized) {
677 if(!mFirstConfiguration){
678 //send the last unconfigure
679 cam_stream_size_info_t stream_config_info;
680 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
681 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
682 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800683 m_bIs4KVideo ? 0 :
684 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700685 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700686 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
687 stream_config_info);
688 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
689 if (rc < 0) {
690 LOGE("set_parms failed for unconfigure");
691 }
692 }
693 deinitParameters();
694 }
695
696 if (mChannelHandle) {
697 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
698 mChannelHandle);
699 LOGH("deleting channel %d", mChannelHandle);
700 mChannelHandle = 0;
701 }
702
703 if (mState != CLOSED)
704 closeCamera();
705
706 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
707 req.mPendingBufferList.clear();
708 }
709 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700710 for (pendingRequestIterator i = mPendingRequestsList.begin();
711 i != mPendingRequestsList.end();) {
712 i = erasePendingRequest(i);
713 }
714 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
715 if (mDefaultMetadata[i])
716 free_camera_metadata(mDefaultMetadata[i]);
717
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800718 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700719
720 pthread_cond_destroy(&mRequestCond);
721
722 pthread_cond_destroy(&mBuffersCond);
723
724 pthread_mutex_destroy(&mMutex);
725 LOGD("X");
726}
727
728/*===========================================================================
729 * FUNCTION : erasePendingRequest
730 *
731 * DESCRIPTION: function to erase a desired pending request after freeing any
732 * allocated memory
733 *
734 * PARAMETERS :
735 * @i : iterator pointing to pending request to be erased
736 *
737 * RETURN : iterator pointing to the next request
738 *==========================================================================*/
739QCamera3HardwareInterface::pendingRequestIterator
740 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
741{
742 if (i->input_buffer != NULL) {
743 free(i->input_buffer);
744 i->input_buffer = NULL;
745 }
746 if (i->settings != NULL)
747 free_camera_metadata((camera_metadata_t*)i->settings);
748 return mPendingRequestsList.erase(i);
749}
750
751/*===========================================================================
752 * FUNCTION : camEvtHandle
753 *
754 * DESCRIPTION: Function registered to mm-camera-interface to handle events
755 *
756 * PARAMETERS :
757 * @camera_handle : interface layer camera handle
758 * @evt : ptr to event
759 * @user_data : user data ptr
760 *
761 * RETURN : none
762 *==========================================================================*/
763void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
764 mm_camera_event_t *evt,
765 void *user_data)
766{
767 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
768 if (obj && evt) {
769 switch(evt->server_event_type) {
770 case CAM_EVENT_TYPE_DAEMON_DIED:
771 pthread_mutex_lock(&obj->mMutex);
772 obj->mState = ERROR;
773 pthread_mutex_unlock(&obj->mMutex);
774 LOGE("Fatal, camera daemon died");
775 break;
776
777 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
778 LOGD("HAL got request pull from Daemon");
779 pthread_mutex_lock(&obj->mMutex);
780 obj->mWokenUpByDaemon = true;
781 obj->unblockRequestIfNecessary();
782 pthread_mutex_unlock(&obj->mMutex);
783 break;
784
785 default:
786 LOGW("Warning: Unhandled event %d",
787 evt->server_event_type);
788 break;
789 }
790 } else {
791 LOGE("NULL user_data/evt");
792 }
793}
794
795/*===========================================================================
796 * FUNCTION : openCamera
797 *
798 * DESCRIPTION: open camera
799 *
800 * PARAMETERS :
801 * @hw_device : double ptr for camera device struct
802 *
803 * RETURN : int32_t type of status
804 * NO_ERROR -- success
805 * none-zero failure code
806 *==========================================================================*/
807int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
808{
809 int rc = 0;
810 if (mState != CLOSED) {
811 *hw_device = NULL;
812 return PERMISSION_DENIED;
813 }
814
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800815 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700816 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
817 mCameraId);
818
819 rc = openCamera();
820 if (rc == 0) {
821 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800822 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700823 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800824 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700825
Thierry Strudel3d639192016-09-09 11:52:26 -0700826 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
827 mCameraId, rc);
828
829 if (rc == NO_ERROR) {
830 mState = OPENED;
831 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800832
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -0700833 if (gHdrPlusClient != nullptr) {
834 mIsApInputUsedForHdrPlus =
835 property_get_bool("persist.camera.hdrplus.apinput", false);
836 ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
837 mIsApInputUsedForHdrPlus ? "AP" : "Easel");
838 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800839
Thierry Strudel3d639192016-09-09 11:52:26 -0700840 return rc;
841}
842
843/*===========================================================================
844 * FUNCTION : openCamera
845 *
846 * DESCRIPTION: open camera
847 *
848 * PARAMETERS : none
849 *
850 * RETURN : int32_t type of status
851 * NO_ERROR -- success
852 * none-zero failure code
853 *==========================================================================*/
854int QCamera3HardwareInterface::openCamera()
855{
856 int rc = 0;
857 char value[PROPERTY_VALUE_MAX];
858
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800859 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700860 if (mCameraHandle) {
861 LOGE("Failure: Camera already opened");
862 return ALREADY_EXISTS;
863 }
864
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800865 if (gHdrPlusClient != nullptr) {
866 rc = gHdrPlusClient->resumeEasel();
867 if (rc != 0) {
868 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
869 return rc;
870 }
871 }
872
Thierry Strudel3d639192016-09-09 11:52:26 -0700873 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
874 if (rc < 0) {
875 LOGE("Failed to reserve flash for camera id: %d",
876 mCameraId);
877 return UNKNOWN_ERROR;
878 }
879
880 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
881 if (rc) {
882 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
883 return rc;
884 }
885
886 if (!mCameraHandle) {
887 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
888 return -ENODEV;
889 }
890
891 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
892 camEvtHandle, (void *)this);
893
894 if (rc < 0) {
895 LOGE("Error, failed to register event callback");
896 /* Not closing camera here since it is already handled in destructor */
897 return FAILED_TRANSACTION;
898 }
899
900 mExifParams.debug_params =
901 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
902 if (mExifParams.debug_params) {
903 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
904 } else {
905 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
906 return NO_MEMORY;
907 }
908 mFirstConfiguration = true;
909
910 //Notify display HAL that a camera session is active.
911 //But avoid calling the same during bootup because camera service might open/close
912 //cameras at boot time during its initialization and display service will also internally
913 //wait for camera service to initialize first while calling this display API, resulting in a
914 //deadlock situation. Since boot time camera open/close calls are made only to fetch
915 //capabilities, no need of this display bw optimization.
916 //Use "service.bootanim.exit" property to know boot status.
917 property_get("service.bootanim.exit", value, "0");
918 if (atoi(value) == 1) {
919 pthread_mutex_lock(&gCamLock);
920 if (gNumCameraSessions++ == 0) {
921 setCameraLaunchStatus(true);
922 }
923 pthread_mutex_unlock(&gCamLock);
924 }
925
926 //fill the session id needed while linking dual cam
927 pthread_mutex_lock(&gCamLock);
928 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
929 &sessionId[mCameraId]);
930 pthread_mutex_unlock(&gCamLock);
931
932 if (rc < 0) {
933 LOGE("Error, failed to get sessiion id");
934 return UNKNOWN_ERROR;
935 } else {
936 //Allocate related cam sync buffer
937 //this is needed for the payload that goes along with bundling cmd for related
938 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700939 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
940 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700941 if(rc != OK) {
942 rc = NO_MEMORY;
943 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
944 return NO_MEMORY;
945 }
946
947 //Map memory for related cam sync buffer
948 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700949 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
950 m_pDualCamCmdHeap->getFd(0),
951 sizeof(cam_dual_camera_cmd_info_t),
952 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700953 if(rc < 0) {
954 LOGE("Dualcam: failed to map Related cam sync buffer");
955 rc = FAILED_TRANSACTION;
956 return NO_MEMORY;
957 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700958 m_pDualCamCmdPtr =
959 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700960 }
961
962 LOGH("mCameraId=%d",mCameraId);
963
964 return NO_ERROR;
965}
966
967/*===========================================================================
968 * FUNCTION : closeCamera
969 *
970 * DESCRIPTION: close camera
971 *
972 * PARAMETERS : none
973 *
974 * RETURN : int32_t type of status
975 * NO_ERROR -- success
976 * none-zero failure code
977 *==========================================================================*/
978int QCamera3HardwareInterface::closeCamera()
979{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800980 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700981 int rc = NO_ERROR;
982 char value[PROPERTY_VALUE_MAX];
983
984 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
985 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700986
987 // unmap memory for related cam sync buffer
988 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800989 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700990 if (NULL != m_pDualCamCmdHeap) {
991 m_pDualCamCmdHeap->deallocate();
992 delete m_pDualCamCmdHeap;
993 m_pDualCamCmdHeap = NULL;
994 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700995 }
996
Thierry Strudel3d639192016-09-09 11:52:26 -0700997 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
998 mCameraHandle = NULL;
999
1000 //reset session id to some invalid id
1001 pthread_mutex_lock(&gCamLock);
1002 sessionId[mCameraId] = 0xDEADBEEF;
1003 pthread_mutex_unlock(&gCamLock);
1004
1005 //Notify display HAL that there is no active camera session
1006 //but avoid calling the same during bootup. Refer to openCamera
1007 //for more details.
1008 property_get("service.bootanim.exit", value, "0");
1009 if (atoi(value) == 1) {
1010 pthread_mutex_lock(&gCamLock);
1011 if (--gNumCameraSessions == 0) {
1012 setCameraLaunchStatus(false);
1013 }
1014 pthread_mutex_unlock(&gCamLock);
1015 }
1016
Thierry Strudel3d639192016-09-09 11:52:26 -07001017 if (mExifParams.debug_params) {
1018 free(mExifParams.debug_params);
1019 mExifParams.debug_params = NULL;
1020 }
1021 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1022 LOGW("Failed to release flash for camera id: %d",
1023 mCameraId);
1024 }
1025 mState = CLOSED;
1026 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1027 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001028
1029 if (gHdrPlusClient != nullptr) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001030 // Disable HDR+ mode.
1031 disableHdrPlusModeLocked();
1032 // Disconnect Easel if it's connected.
1033 pthread_mutex_lock(&gCamLock);
1034 if (gEaselConnected) {
1035 gHdrPlusClient->disconnect();
1036 gEaselConnected = false;
1037 }
1038 pthread_mutex_unlock(&gCamLock);
1039
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001040 rc = gHdrPlusClient->suspendEasel();
1041 if (rc != 0) {
1042 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1043 }
1044 }
1045
Thierry Strudel3d639192016-09-09 11:52:26 -07001046 return rc;
1047}
1048
1049/*===========================================================================
1050 * FUNCTION : initialize
1051 *
1052 * DESCRIPTION: Initialize frameworks callback functions
1053 *
1054 * PARAMETERS :
1055 * @callback_ops : callback function to frameworks
1056 *
1057 * RETURN :
1058 *
1059 *==========================================================================*/
1060int QCamera3HardwareInterface::initialize(
1061 const struct camera3_callback_ops *callback_ops)
1062{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001063 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001064 int rc;
1065
1066 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1067 pthread_mutex_lock(&mMutex);
1068
1069 // Validate current state
1070 switch (mState) {
1071 case OPENED:
1072 /* valid state */
1073 break;
1074 default:
1075 LOGE("Invalid state %d", mState);
1076 rc = -ENODEV;
1077 goto err1;
1078 }
1079
1080 rc = initParameters();
1081 if (rc < 0) {
1082 LOGE("initParamters failed %d", rc);
1083 goto err1;
1084 }
1085 mCallbackOps = callback_ops;
1086
1087 mChannelHandle = mCameraHandle->ops->add_channel(
1088 mCameraHandle->camera_handle, NULL, NULL, this);
1089 if (mChannelHandle == 0) {
1090 LOGE("add_channel failed");
1091 rc = -ENOMEM;
1092 pthread_mutex_unlock(&mMutex);
1093 return rc;
1094 }
1095
1096 pthread_mutex_unlock(&mMutex);
1097 mCameraInitialized = true;
1098 mState = INITIALIZED;
1099 LOGI("X");
1100 return 0;
1101
1102err1:
1103 pthread_mutex_unlock(&mMutex);
1104 return rc;
1105}
1106
1107/*===========================================================================
1108 * FUNCTION : validateStreamDimensions
1109 *
1110 * DESCRIPTION: Check if the configuration requested are those advertised
1111 *
1112 * PARAMETERS :
1113 * @stream_list : streams to be configured
1114 *
1115 * RETURN :
1116 *
1117 *==========================================================================*/
1118int QCamera3HardwareInterface::validateStreamDimensions(
1119 camera3_stream_configuration_t *streamList)
1120{
1121 int rc = NO_ERROR;
1122 size_t count = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08001123 uint32_t depthWidth =
1124 gCamCapability[mCameraId]->active_array_size.width;
1125 uint32_t depthHeight =
1126 gCamCapability[mCameraId]->active_array_size.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001127
1128 camera3_stream_t *inputStream = NULL;
1129 /*
1130 * Loop through all streams to find input stream if it exists*
1131 */
1132 for (size_t i = 0; i< streamList->num_streams; i++) {
1133 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1134 if (inputStream != NULL) {
1135 LOGE("Error, Multiple input streams requested");
1136 return -EINVAL;
1137 }
1138 inputStream = streamList->streams[i];
1139 }
1140 }
1141 /*
1142 * Loop through all streams requested in configuration
1143 * Check if unsupported sizes have been requested on any of them
1144 */
1145 for (size_t j = 0; j < streamList->num_streams; j++) {
1146 bool sizeFound = false;
1147 camera3_stream_t *newStream = streamList->streams[j];
1148
1149 uint32_t rotatedHeight = newStream->height;
1150 uint32_t rotatedWidth = newStream->width;
1151 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1152 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1153 rotatedHeight = newStream->width;
1154 rotatedWidth = newStream->height;
1155 }
1156
1157 /*
1158 * Sizes are different for each type of stream format check against
1159 * appropriate table.
1160 */
1161 switch (newStream->format) {
1162 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1163 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1164 case HAL_PIXEL_FORMAT_RAW10:
1165 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1166 for (size_t i = 0; i < count; i++) {
1167 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1168 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1169 sizeFound = true;
1170 break;
1171 }
1172 }
1173 break;
1174 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001175 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1176 //As per spec. depth cloud should be sample count / 16
1177 uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
1178 if ((depthSamplesCount == newStream->width) &&
1179 (1 == newStream->height)) {
1180 sizeFound = true;
1181 }
1182 break;
1183 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001184 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1185 /* Verify set size against generated sizes table */
1186 for (size_t i = 0; i < count; i++) {
1187 if (((int32_t)rotatedWidth ==
1188 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1189 ((int32_t)rotatedHeight ==
1190 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1191 sizeFound = true;
1192 break;
1193 }
1194 }
1195 break;
1196 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1197 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1198 default:
1199 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1200 || newStream->stream_type == CAMERA3_STREAM_INPUT
1201 || IS_USAGE_ZSL(newStream->usage)) {
1202 if (((int32_t)rotatedWidth ==
1203 gCamCapability[mCameraId]->active_array_size.width) &&
1204 ((int32_t)rotatedHeight ==
1205 gCamCapability[mCameraId]->active_array_size.height)) {
1206 sizeFound = true;
1207 break;
1208 }
1209 /* We could potentially break here to enforce ZSL stream
1210 * set from frameworks always is full active array size
1211 * but it is not clear from the spc if framework will always
1212 * follow that, also we have logic to override to full array
1213 * size, so keeping the logic lenient at the moment
1214 */
1215 }
1216 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1217 MAX_SIZES_CNT);
1218 for (size_t i = 0; i < count; i++) {
1219 if (((int32_t)rotatedWidth ==
1220 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1221 ((int32_t)rotatedHeight ==
1222 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1223 sizeFound = true;
1224 break;
1225 }
1226 }
1227 break;
1228 } /* End of switch(newStream->format) */
1229
1230 /* We error out even if a single stream has unsupported size set */
1231 if (!sizeFound) {
1232 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1233 rotatedWidth, rotatedHeight, newStream->format,
1234 gCamCapability[mCameraId]->active_array_size.width,
1235 gCamCapability[mCameraId]->active_array_size.height);
1236 rc = -EINVAL;
1237 break;
1238 }
1239 } /* End of for each stream */
1240 return rc;
1241}
1242
1243/*==============================================================================
1244 * FUNCTION : isSupportChannelNeeded
1245 *
1246 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1247 *
1248 * PARAMETERS :
1249 * @stream_list : streams to be configured
1250 * @stream_config_info : the config info for streams to be configured
1251 *
1252 * RETURN : Boolen true/false decision
1253 *
1254 *==========================================================================*/
1255bool QCamera3HardwareInterface::isSupportChannelNeeded(
1256 camera3_stream_configuration_t *streamList,
1257 cam_stream_size_info_t stream_config_info)
1258{
1259 uint32_t i;
1260 bool pprocRequested = false;
1261 /* Check for conditions where PProc pipeline does not have any streams*/
1262 for (i = 0; i < stream_config_info.num_streams; i++) {
1263 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1264 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1265 pprocRequested = true;
1266 break;
1267 }
1268 }
1269
1270 if (pprocRequested == false )
1271 return true;
1272
1273 /* Dummy stream needed if only raw or jpeg streams present */
1274 for (i = 0; i < streamList->num_streams; i++) {
1275 switch(streamList->streams[i]->format) {
1276 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1277 case HAL_PIXEL_FORMAT_RAW10:
1278 case HAL_PIXEL_FORMAT_RAW16:
1279 case HAL_PIXEL_FORMAT_BLOB:
1280 break;
1281 default:
1282 return false;
1283 }
1284 }
1285 return true;
1286}
1287
1288/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001289 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001290 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001291 * DESCRIPTION: Get sensor mode information based on current stream configuratoin
Thierry Strudel3d639192016-09-09 11:52:26 -07001292 *
1293 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001294 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001295 *
1296 * RETURN : int32_t type of status
1297 * NO_ERROR -- success
1298 * none-zero failure code
1299 *
1300 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001301int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001302{
1303 int32_t rc = NO_ERROR;
1304
1305 cam_dimension_t max_dim = {0, 0};
1306 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1307 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1308 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1309 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1310 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1311 }
1312
1313 clear_metadata_buffer(mParameters);
1314
1315 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1316 max_dim);
1317 if (rc != NO_ERROR) {
1318 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1319 return rc;
1320 }
1321
1322 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1323 if (rc != NO_ERROR) {
1324 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1325 return rc;
1326 }
1327
1328 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001329 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001330
1331 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1332 mParameters);
1333 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001334 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001335 return rc;
1336 }
1337
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001338 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001339 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1340 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1341 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1342 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1343 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001344
1345 return rc;
1346}
1347
1348/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001349 * FUNCTION : addToPPFeatureMask
1350 *
1351 * DESCRIPTION: add additional features to pp feature mask based on
1352 * stream type and usecase
1353 *
1354 * PARAMETERS :
1355 * @stream_format : stream type for feature mask
1356 * @stream_idx : stream idx within postprocess_mask list to change
1357 *
1358 * RETURN : NULL
1359 *
1360 *==========================================================================*/
1361void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1362 uint32_t stream_idx)
1363{
1364 char feature_mask_value[PROPERTY_VALUE_MAX];
1365 cam_feature_mask_t feature_mask;
1366 int args_converted;
1367 int property_len;
1368
1369 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001370#ifdef _LE_CAMERA_
1371 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1372 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1373 property_len = property_get("persist.camera.hal3.feature",
1374 feature_mask_value, swtnr_feature_mask_value);
1375#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001376 property_len = property_get("persist.camera.hal3.feature",
1377 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001378#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001379 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1380 (feature_mask_value[1] == 'x')) {
1381 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1382 } else {
1383 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1384 }
1385 if (1 != args_converted) {
1386 feature_mask = 0;
1387 LOGE("Wrong feature mask %s", feature_mask_value);
1388 return;
1389 }
1390
1391 switch (stream_format) {
1392 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1393 /* Add SW TNR or LLVD to pp feature mask only if video hint is enabled */
1394 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1395 mStreamConfigInfo.postprocess_mask[stream_idx]
1396 |= CAM_QTI_FEATURE_SW_TNR;
1397 LOGH("Added SW TNR to pp feature mask");
1398 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1399 mStreamConfigInfo.postprocess_mask[stream_idx]
1400 |= CAM_QCOM_FEATURE_LLVD;
1401 LOGH("Added LLVD SeeMore to pp feature mask");
1402 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001403 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1404 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1405 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1406 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001407 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1408 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1409 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1410 CAM_QTI_FEATURE_BINNING_CORRECTION;
1411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001412 break;
1413 }
1414 default:
1415 break;
1416 }
1417 LOGD("PP feature mask %llx",
1418 mStreamConfigInfo.postprocess_mask[stream_idx]);
1419}
1420
1421/*==============================================================================
1422 * FUNCTION : updateFpsInPreviewBuffer
1423 *
1424 * DESCRIPTION: update FPS information in preview buffer.
1425 *
1426 * PARAMETERS :
1427 * @metadata : pointer to metadata buffer
1428 * @frame_number: frame_number to look for in pending buffer list
1429 *
1430 * RETURN : None
1431 *
1432 *==========================================================================*/
1433void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1434 uint32_t frame_number)
1435{
1436 // Mark all pending buffers for this particular request
1437 // with corresponding framerate information
1438 for (List<PendingBuffersInRequest>::iterator req =
1439 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1440 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1441 for(List<PendingBufferInfo>::iterator j =
1442 req->mPendingBufferList.begin();
1443 j != req->mPendingBufferList.end(); j++) {
1444 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1445 if ((req->frame_number == frame_number) &&
1446 (channel->getStreamTypeMask() &
1447 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1448 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1449 CAM_INTF_PARM_FPS_RANGE, metadata) {
1450 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1451 struct private_handle_t *priv_handle =
1452 (struct private_handle_t *)(*(j->buffer));
1453 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1454 }
1455 }
1456 }
1457 }
1458}
1459
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001460/*==============================================================================
1461 * FUNCTION : updateTimeStampInPendingBuffers
1462 *
1463 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1464 * of a frame number
1465 *
1466 * PARAMETERS :
1467 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1468 * @timestamp : timestamp to be set
1469 *
1470 * RETURN : None
1471 *
1472 *==========================================================================*/
1473void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1474 uint32_t frameNumber, nsecs_t timestamp)
1475{
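    // Walk every pending request and stamp only the buffers that belong to frameNumber;
    // the timestamp is written into the gralloc private handle's display metadata (SET_VT_TIMESTAMP).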
1476 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1477 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1478 if (req->frame_number != frameNumber)
1479 continue;
1480
1481 for (auto k = req->mPendingBufferList.begin();
1482 k != req->mPendingBufferList.end(); k++ ) {
1483 struct private_handle_t *priv_handle =
1484 (struct private_handle_t *) (*(k->buffer));
1485 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1486 }
1487 }
1488 return;
1489}
1490
Thierry Strudel3d639192016-09-09 11:52:26 -07001491/*===========================================================================
1492 * FUNCTION : configureStreams
1493 *
1494 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1495 * and output streams.
1496 *
1497 * PARAMETERS :
1498 * @stream_list : streams to be configured
1499 *
1500 * RETURN : int type of status (NO_ERROR on success, non-zero failure code otherwise)
1501 *
1502 *==========================================================================*/
1503int QCamera3HardwareInterface::configureStreams(
1504 camera3_stream_configuration_t *streamList)
1505{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001506 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001507 int rc = 0;
1508
1509 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001510 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001512 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001513
1514 return rc;
1515}
1516
1517/*===========================================================================
1518 * FUNCTION : configureStreamsPerfLocked
1519 *
1520 * DESCRIPTION: configureStreams while perfLock is held.
1521 *
1522 * PARAMETERS :
1523 * @stream_list : streams to be configured
1524 *
1525 * RETURN : int32_t type of status
1526 * NO_ERROR -- success
1527 * non-zero failure code
1528 *==========================================================================*/
1529int QCamera3HardwareInterface::configureStreamsPerfLocked(
1530 camera3_stream_configuration_t *streamList)
1531{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001532 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 int rc = 0;
1534
1535 // Sanity check stream_list
1536 if (streamList == NULL) {
1537 LOGE("NULL stream configuration");
1538 return BAD_VALUE;
1539 }
1540 if (streamList->streams == NULL) {
1541 LOGE("NULL stream list");
1542 return BAD_VALUE;
1543 }
1544
1545 if (streamList->num_streams < 1) {
1546 LOGE("Bad number of streams requested: %d",
1547 streamList->num_streams);
1548 return BAD_VALUE;
1549 }
1550
1551 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1552 LOGE("Maximum number of streams %d exceeded: %d",
1553 MAX_NUM_STREAMS, streamList->num_streams);
1554 return BAD_VALUE;
1555 }
1556
1557 mOpMode = streamList->operation_mode;
1558 LOGD("mOpMode: %d", mOpMode);
1559
1560 /* first invalidate all the streams in mStreamInfo
1561 * if they appear again, they will be validated */
1562 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1563 it != mStreamInfo.end(); it++) {
1564 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1565 if (channel) {
1566 channel->stop();
1567 }
1568 (*it)->status = INVALID;
1569 }
1570
1571 if (mRawDumpChannel) {
1572 mRawDumpChannel->stop();
1573 delete mRawDumpChannel;
1574 mRawDumpChannel = NULL;
1575 }
1576
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001577 if (mHdrPlusRawSrcChannel) {
1578 mHdrPlusRawSrcChannel->stop();
1579 delete mHdrPlusRawSrcChannel;
1580 mHdrPlusRawSrcChannel = NULL;
1581 }
1582
Thierry Strudel3d639192016-09-09 11:52:26 -07001583 if (mSupportChannel)
1584 mSupportChannel->stop();
1585
1586 if (mAnalysisChannel) {
1587 mAnalysisChannel->stop();
1588 }
1589 if (mMetadataChannel) {
1590 /* If mStreamInfo is not empty, a metadata stream exists */
1591 mMetadataChannel->stop();
1592 }
1593 if (mChannelHandle) {
1594 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1595 mChannelHandle);
1596 LOGD("stopping channel %d", mChannelHandle);
1597 }
1598
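    // With all existing channels stopped, take mMutex before validating state and rebuilding
    // the stream configuration below.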
1599 pthread_mutex_lock(&mMutex);
1600
1601 // Check state
1602 switch (mState) {
1603 case INITIALIZED:
1604 case CONFIGURED:
1605 case STARTED:
1606 /* valid state */
1607 break;
1608 default:
1609 LOGE("Invalid state %d", mState);
1610 pthread_mutex_unlock(&mMutex);
1611 return -ENODEV;
1612 }
1613
1614 /* Check whether we have video stream */
1615 m_bIs4KVideo = false;
1616 m_bIsVideo = false;
1617 m_bEisSupportedSize = false;
1618 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001619 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001621 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 uint32_t videoWidth = 0U;
1623 uint32_t videoHeight = 0U;
1624 size_t rawStreamCnt = 0;
1625 size_t stallStreamCnt = 0;
1626 size_t processedStreamCnt = 0;
1627 // Number of streams on ISP encoder path
1628 size_t numStreamsOnEncoder = 0;
1629 size_t numYuv888OnEncoder = 0;
1630 bool bYuv888OverrideJpeg = false;
1631 cam_dimension_t largeYuv888Size = {0, 0};
1632 cam_dimension_t maxViewfinderSize = {0, 0};
1633 bool bJpegExceeds4K = false;
1634 bool bJpegOnEncoder = false;
1635 bool bUseCommonFeatureMask = false;
1636 cam_feature_mask_t commonFeatureMask = 0;
1637 bool bSmallJpegSize = false;
1638 uint32_t width_ratio;
1639 uint32_t height_ratio;
1640 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1641 camera3_stream_t *inputStream = NULL;
1642 bool isJpeg = false;
1643 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001644 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001645
1646 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1647
1648 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 uint8_t eis_prop_set;
1650 uint32_t maxEisWidth = 0;
1651 uint32_t maxEisHeight = 0;
1652
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001653 // Initialize all instant AEC related variables
1654 mInstantAEC = false;
1655 mResetInstantAEC = false;
1656 mInstantAECSettledFrameNumber = 0;
1657 mAecSkipDisplayFrameBound = 0;
1658 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001659 mCurrFeatureState = 0;
1660 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001661
Thierry Strudel3d639192016-09-09 11:52:26 -07001662 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1663
1664 size_t count = IS_TYPE_MAX;
1665 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1666 for (size_t i = 0; i < count; i++) {
1667 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001668 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1669 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001670 break;
1671 }
1672 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001673
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001674 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001675 maxEisWidth = MAX_EIS_WIDTH;
1676 maxEisHeight = MAX_EIS_HEIGHT;
1677 }
1678
1679 /* EIS setprop control */
1680 char eis_prop[PROPERTY_VALUE_MAX];
1681 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001682 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001683 eis_prop_set = (uint8_t)atoi(eis_prop);
1684
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001685 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001686 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1687
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001688 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1689 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001690
Thierry Strudel3d639192016-09-09 11:52:26 -07001691 /* stream configurations */
1692 for (size_t i = 0; i < streamList->num_streams; i++) {
1693 camera3_stream_t *newStream = streamList->streams[i];
1694 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1695 "height = %d, rotation = %d, usage = 0x%x",
1696 i, newStream->stream_type, newStream->format,
1697 newStream->width, newStream->height, newStream->rotation,
1698 newStream->usage);
1699 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1700 newStream->stream_type == CAMERA3_STREAM_INPUT){
1701 isZsl = true;
1702 }
1703 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1704 inputStream = newStream;
1705 }
1706
Emilian Peev7650c122017-01-19 08:24:33 -08001707 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1708 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 isJpeg = true;
1710 jpegSize.width = newStream->width;
1711 jpegSize.height = newStream->height;
1712 if (newStream->width > VIDEO_4K_WIDTH ||
1713 newStream->height > VIDEO_4K_HEIGHT)
1714 bJpegExceeds4K = true;
1715 }
1716
1717 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1718 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1719 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001720 // In HAL3 we can have multiple different video streams.
1721 // The variables video width and height are used below as
1722 // dimensions of the biggest of them
1723 if (videoWidth < newStream->width ||
1724 videoHeight < newStream->height) {
1725 videoWidth = newStream->width;
1726 videoHeight = newStream->height;
1727 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001728 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1729 (VIDEO_4K_HEIGHT <= newStream->height)) {
1730 m_bIs4KVideo = true;
1731 }
1732 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1733 (newStream->height <= maxEisHeight);
1734 }
1735 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1736 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1737 switch (newStream->format) {
1738 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001739 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1740 depthPresent = true;
1741 break;
1742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001743 stallStreamCnt++;
1744 if (isOnEncoder(maxViewfinderSize, newStream->width,
1745 newStream->height)) {
1746 numStreamsOnEncoder++;
1747 bJpegOnEncoder = true;
1748 }
1749 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1750 newStream->width);
1751 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1752 newStream->height);
1753 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1754 "FATAL: max_downscale_factor cannot be zero and so assert");
1755 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1756 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1757 LOGH("Setting small jpeg size flag to true");
1758 bSmallJpegSize = true;
1759 }
1760 break;
1761 case HAL_PIXEL_FORMAT_RAW10:
1762 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1763 case HAL_PIXEL_FORMAT_RAW16:
1764 rawStreamCnt++;
1765 break;
1766 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1767 processedStreamCnt++;
1768 if (isOnEncoder(maxViewfinderSize, newStream->width,
1769 newStream->height)) {
1770 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1771 !IS_USAGE_ZSL(newStream->usage)) {
1772 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1773 }
1774 numStreamsOnEncoder++;
1775 }
1776 break;
1777 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1778 processedStreamCnt++;
1779 if (isOnEncoder(maxViewfinderSize, newStream->width,
1780 newStream->height)) {
1781 // If Yuv888 size is not greater than 4K, set feature mask
1782 // to SUPERSET so that it support concurrent request on
1783 // YUV and JPEG.
1784 if (newStream->width <= VIDEO_4K_WIDTH &&
1785 newStream->height <= VIDEO_4K_HEIGHT) {
1786 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1787 }
1788 numStreamsOnEncoder++;
1789 numYuv888OnEncoder++;
1790 largeYuv888Size.width = newStream->width;
1791 largeYuv888Size.height = newStream->height;
1792 }
1793 break;
1794 default:
1795 processedStreamCnt++;
1796 if (isOnEncoder(maxViewfinderSize, newStream->width,
1797 newStream->height)) {
1798 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1799 numStreamsOnEncoder++;
1800 }
1801 break;
1802 }
1803
1804 }
1805 }
1806
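    // EIS is kept enabled only for back-camera video configurations; front/front-aux sensors
    // and still-only sessions fall back to non-EIS operation.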
1807 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1808 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1809 !m_bIsVideo) {
1810 m_bEisEnable = false;
1811 }
1812
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 uint8_t forceEnableTnr = 0;
1814 char tnr_prop[PROPERTY_VALUE_MAX];
1815 memset(tnr_prop, 0, sizeof(tnr_prop));
1816 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1817 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 /* Logic to enable/disable TNR based on specific config size/etc.*/
1820 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1821 ((videoWidth == 1920 && videoHeight == 1080) ||
1822 (videoWidth == 1280 && videoHeight == 720)) &&
1823 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1824 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001825 else if (forceEnableTnr)
1826 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001828 char videoHdrProp[PROPERTY_VALUE_MAX];
1829 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1830 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1831 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1832
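    // Video HDR is enabled only when the persist.camera.hdr.video property is set, a video
    // stream is present, and the session is not in constrained high speed (HFR) mode.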
1833 if (hdr_mode_prop == 1 && m_bIsVideo &&
1834 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1835 m_bVideoHdrEnabled = true;
1836 else
1837 m_bVideoHdrEnabled = false;
1838
1839
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 /* Check if num_streams is sane */
1841 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1842 rawStreamCnt > MAX_RAW_STREAMS ||
1843 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1844 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1845 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1846 pthread_mutex_unlock(&mMutex);
1847 return -EINVAL;
1848 }
1849 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001850 if (isZsl && m_bIs4KVideo) {
1851 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001852 pthread_mutex_unlock(&mMutex);
1853 return -EINVAL;
1854 }
1855 /* Check if stream sizes are sane */
1856 if (numStreamsOnEncoder > 2) {
1857 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1858 pthread_mutex_unlock(&mMutex);
1859 return -EINVAL;
1860 } else if (1 < numStreamsOnEncoder){
1861 bUseCommonFeatureMask = true;
1862 LOGH("Multiple streams above max viewfinder size, common mask needed");
1863 }
1864
1865 /* Check if BLOB size is greater than 4k in 4k recording case */
1866 if (m_bIs4KVideo && bJpegExceeds4K) {
1867 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1868 pthread_mutex_unlock(&mMutex);
1869 return -EINVAL;
1870 }
1871
Emilian Peev7650c122017-01-19 08:24:33 -08001872 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1873 depthPresent) {
1874 LOGE("HAL doesn't support depth streams in HFR mode!");
1875 pthread_mutex_unlock(&mMutex);
1876 return -EINVAL;
1877 }
1878
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1880 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1881 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1882 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1883 // configurations:
1884 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1885 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1886 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1887 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1888 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1889 __func__);
1890 pthread_mutex_unlock(&mMutex);
1891 return -EINVAL;
1892 }
1893
1894 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1895 // the YUV stream's size is greater or equal to the JPEG size, set common
1896 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1897 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1898 jpegSize.width, jpegSize.height) &&
1899 largeYuv888Size.width > jpegSize.width &&
1900 largeYuv888Size.height > jpegSize.height) {
1901 bYuv888OverrideJpeg = true;
1902 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1903 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1904 }
1905
1906 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1907 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1908 commonFeatureMask);
1909 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1910 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1911
1912 rc = validateStreamDimensions(streamList);
1913 if (rc == NO_ERROR) {
1914 rc = validateStreamRotations(streamList);
1915 }
1916 if (rc != NO_ERROR) {
1917 LOGE("Invalid stream configuration requested!");
1918 pthread_mutex_unlock(&mMutex);
1919 return rc;
1920 }
1921
1922 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1923 for (size_t i = 0; i < streamList->num_streams; i++) {
1924 camera3_stream_t *newStream = streamList->streams[i];
1925 LOGH("newStream type = %d, stream format = %d "
1926 "stream size : %d x %d, stream rotation = %d",
1927 newStream->stream_type, newStream->format,
1928 newStream->width, newStream->height, newStream->rotation);
1929 //if the stream is in the mStreamList validate it
1930 bool stream_exists = false;
1931 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1932 it != mStreamInfo.end(); it++) {
1933 if ((*it)->stream == newStream) {
1934 QCamera3ProcessingChannel *channel =
1935 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1936 stream_exists = true;
1937 if (channel)
1938 delete channel;
1939 (*it)->status = VALID;
1940 (*it)->stream->priv = NULL;
1941 (*it)->channel = NULL;
1942 }
1943 }
1944 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1945 //new stream
1946 stream_info_t* stream_info;
1947 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1948 if (!stream_info) {
1949 LOGE("Could not allocate stream info");
1950 rc = -ENOMEM;
1951 pthread_mutex_unlock(&mMutex);
1952 return rc;
1953 }
1954 stream_info->stream = newStream;
1955 stream_info->status = VALID;
1956 stream_info->channel = NULL;
1957 mStreamInfo.push_back(stream_info);
1958 }
1959 /* Covers Opaque ZSL and API1 F/W ZSL */
1960 if (IS_USAGE_ZSL(newStream->usage)
1961 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1962 if (zslStream != NULL) {
1963 LOGE("Multiple input/reprocess streams requested!");
1964 pthread_mutex_unlock(&mMutex);
1965 return BAD_VALUE;
1966 }
1967 zslStream = newStream;
1968 }
1969 /* Covers YUV reprocess */
1970 if (inputStream != NULL) {
1971 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1972 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1973 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1974 && inputStream->width == newStream->width
1975 && inputStream->height == newStream->height) {
1976 if (zslStream != NULL) {
1977 /* This scenario indicates that multiple YUV streams with the same size
1978 * as the input stream have been requested. Since the zsl stream handle
1979 * is used solely to override the size of streams that share h/w streams,
1980 * we just make a guess here as to which of the streams is the ZSL stream.
1981 * This will be refactored once we have generic logic for streams sharing
1982 * encoder output.
1983 */
1984 LOGH("Warning, Multiple ip/reprocess streams requested!");
1985 }
1986 zslStream = newStream;
1987 }
1988 }
1989 }
1990
1991 /* If a zsl stream is set, we know that we have configured at least one input or
1992 bidirectional stream */
1993 if (NULL != zslStream) {
1994 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1995 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1996 mInputStreamInfo.format = zslStream->format;
1997 mInputStreamInfo.usage = zslStream->usage;
1998 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1999 mInputStreamInfo.dim.width,
2000 mInputStreamInfo.dim.height,
2001 mInputStreamInfo.format, mInputStreamInfo.usage);
2002 }
2003
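    // Drop channels left over from the previous configuration; they are recreated below for
    // the new stream list.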
2004 cleanAndSortStreamInfo();
2005 if (mMetadataChannel) {
2006 delete mMetadataChannel;
2007 mMetadataChannel = NULL;
2008 }
2009 if (mSupportChannel) {
2010 delete mSupportChannel;
2011 mSupportChannel = NULL;
2012 }
2013
2014 if (mAnalysisChannel) {
2015 delete mAnalysisChannel;
2016 mAnalysisChannel = NULL;
2017 }
2018
2019 if (mDummyBatchChannel) {
2020 delete mDummyBatchChannel;
2021 mDummyBatchChannel = NULL;
2022 }
2023
Emilian Peev7650c122017-01-19 08:24:33 -08002024 if (mDepthChannel) {
2025 mDepthChannel = NULL;
2026 }
2027
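    // persist.camera.is_type selects the image-stabilization type; the EIS 3.0 specific handling
    // below (PPEISCORE, deeper video buffer queues) keys off this property.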
Thierry Strudel2896d122017-02-23 19:18:03 -08002028 char is_type_value[PROPERTY_VALUE_MAX];
2029 property_get("persist.camera.is_type", is_type_value, "4");
2030 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2031
Thierry Strudel3d639192016-09-09 11:52:26 -07002032 //Create metadata channel and initialize it
2033 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2034 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2035 gCamCapability[mCameraId]->color_arrangement);
2036 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2037 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002038 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002039 if (mMetadataChannel == NULL) {
2040 LOGE("failed to allocate metadata channel");
2041 rc = -ENOMEM;
2042 pthread_mutex_unlock(&mMutex);
2043 return rc;
2044 }
2045 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2046 if (rc < 0) {
2047 LOGE("metadata channel initialization failed");
2048 delete mMetadataChannel;
2049 mMetadataChannel = NULL;
2050 pthread_mutex_unlock(&mMutex);
2051 return rc;
2052 }
2053
Thierry Strudel2896d122017-02-23 19:18:03 -08002054 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002055 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002056 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002057 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2058 /* Allocate channel objects for the requested streams */
2059 for (size_t i = 0; i < streamList->num_streams; i++) {
2060 camera3_stream_t *newStream = streamList->streams[i];
2061 uint32_t stream_usage = newStream->usage;
2062 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2063 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2064 struct camera_info *p_info = NULL;
2065 pthread_mutex_lock(&gCamLock);
2066 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2067 pthread_mutex_unlock(&gCamLock);
2068 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2069 || IS_USAGE_ZSL(newStream->usage)) &&
2070 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002071 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002072 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002073 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2074 if (bUseCommonFeatureMask)
2075 zsl_ppmask = commonFeatureMask;
2076 else
2077 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002079 if (numStreamsOnEncoder > 0)
2080 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2081 else
2082 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002083 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002084 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002085 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002086 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002087 LOGH("Input stream configured, reprocess config");
2088 } else {
2089 //for non zsl streams find out the format
2090 switch (newStream->format) {
2091 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2092 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002093 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002094 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2095 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2096 /* add additional features to pp feature mask */
2097 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2098 mStreamConfigInfo.num_streams);
2099
2100 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2101 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2102 CAM_STREAM_TYPE_VIDEO;
2103 if (m_bTnrEnabled && m_bTnrVideo) {
2104 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2105 CAM_QCOM_FEATURE_CPP_TNR;
2106 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2108 ~CAM_QCOM_FEATURE_CDS;
2109 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002110 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2111 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2112 CAM_QTI_FEATURE_PPEISCORE;
2113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002114 } else {
2115 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2116 CAM_STREAM_TYPE_PREVIEW;
2117 if (m_bTnrEnabled && m_bTnrPreview) {
2118 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2119 CAM_QCOM_FEATURE_CPP_TNR;
2120 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2121 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2122 ~CAM_QCOM_FEATURE_CDS;
2123 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002124 if(!m_bSwTnrPreview) {
2125 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2126 ~CAM_QTI_FEATURE_SW_TNR;
2127 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 padding_info.width_padding = mSurfaceStridePadding;
2129 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002130 previewSize.width = (int32_t)newStream->width;
2131 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002132 }
2133 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2134 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2135 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2136 newStream->height;
2137 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2138 newStream->width;
2139 }
2140 }
2141 break;
2142 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002143 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002144 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2145 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2146 if (bUseCommonFeatureMask)
2147 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2148 commonFeatureMask;
2149 else
2150 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2151 CAM_QCOM_FEATURE_NONE;
2152 } else {
2153 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2154 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2155 }
2156 break;
2157 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002158 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002159 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2160 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2161 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2162 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2163 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002164 /* Remove rotation if it is not supported
2165 for 4K LiveVideo snapshot case (online processing) */
2166 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2167 CAM_QCOM_FEATURE_ROTATION)) {
2168 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2169 &= ~CAM_QCOM_FEATURE_ROTATION;
2170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002171 } else {
2172 if (bUseCommonFeatureMask &&
2173 isOnEncoder(maxViewfinderSize, newStream->width,
2174 newStream->height)) {
2175 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2176 } else {
2177 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2178 }
2179 }
2180 if (isZsl) {
2181 if (zslStream) {
2182 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2183 (int32_t)zslStream->width;
2184 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2185 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002186 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2187 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002188 } else {
2189 LOGE("Error, No ZSL stream identified");
2190 pthread_mutex_unlock(&mMutex);
2191 return -EINVAL;
2192 }
2193 } else if (m_bIs4KVideo) {
2194 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2195 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2196 } else if (bYuv888OverrideJpeg) {
2197 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2198 (int32_t)largeYuv888Size.width;
2199 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2200 (int32_t)largeYuv888Size.height;
2201 }
2202 break;
2203 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2204 case HAL_PIXEL_FORMAT_RAW16:
2205 case HAL_PIXEL_FORMAT_RAW10:
2206 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2207 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2208 isRawStreamRequested = true;
2209 break;
2210 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002211 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002212 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2213 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2214 break;
2215 }
2216 }
2217
2218 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2219 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2220 gCamCapability[mCameraId]->color_arrangement);
2221
2222 if (newStream->priv == NULL) {
2223 //New stream, construct channel
2224 switch (newStream->stream_type) {
2225 case CAMERA3_STREAM_INPUT:
2226 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2227 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2228 break;
2229 case CAMERA3_STREAM_BIDIRECTIONAL:
2230 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2231 GRALLOC_USAGE_HW_CAMERA_WRITE;
2232 break;
2233 case CAMERA3_STREAM_OUTPUT:
2234 /* For video encoding stream, set read/write rarely
2235 * flag so that they may be set to un-cached */
2236 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2237 newStream->usage |=
2238 (GRALLOC_USAGE_SW_READ_RARELY |
2239 GRALLOC_USAGE_SW_WRITE_RARELY |
2240 GRALLOC_USAGE_HW_CAMERA_WRITE);
2241 else if (IS_USAGE_ZSL(newStream->usage))
2242 {
2243 LOGD("ZSL usage flag skipping");
2244 }
2245 else if (newStream == zslStream
2246 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2247 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2248 } else
2249 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2250 break;
2251 default:
2252 LOGE("Invalid stream_type %d", newStream->stream_type);
2253 break;
2254 }
2255
2256 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2257 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2258 QCamera3ProcessingChannel *channel = NULL;
2259 switch (newStream->format) {
2260 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2261 if ((newStream->usage &
2262 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2263 (streamList->operation_mode ==
2264 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2265 ) {
2266 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2267 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002268 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 this,
2270 newStream,
2271 (cam_stream_type_t)
2272 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2273 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2274 mMetadataChannel,
2275 0); //heap buffers are not required for HFR video channel
2276 if (channel == NULL) {
2277 LOGE("allocation of channel failed");
2278 pthread_mutex_unlock(&mMutex);
2279 return -ENOMEM;
2280 }
2281 //channel->getNumBuffers() will return 0 here so use
2282 //MAX_INFLIGHT_HFR_REQUESTS
2283 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2284 newStream->priv = channel;
2285 LOGI("num video buffers in HFR mode: %d",
2286 MAX_INFLIGHT_HFR_REQUESTS);
2287 } else {
2288 /* Copy stream contents in HFR preview only case to create
2289 * dummy batch channel so that sensor streaming is in
2290 * HFR mode */
2291 if (!m_bIsVideo && (streamList->operation_mode ==
2292 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2293 mDummyBatchStream = *newStream;
2294 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 int bufferCount = MAX_INFLIGHT_REQUESTS;
2296 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2297 CAM_STREAM_TYPE_VIDEO) {
2298 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2299 bufferCount = MAX_VIDEO_BUFFERS;
2300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2302 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002303 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 this,
2305 newStream,
2306 (cam_stream_type_t)
2307 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2309 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002310 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 if (channel == NULL) {
2312 LOGE("allocation of channel failed");
2313 pthread_mutex_unlock(&mMutex);
2314 return -ENOMEM;
2315 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002316 /* disable UBWC for preview, though supported,
2317 * to take advantage of CPP duplication */
2318 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2319 (previewSize.width == (int32_t)videoWidth)&&
2320 (previewSize.height == (int32_t)videoHeight)){
2321 channel->setUBWCEnabled(false);
2322 }else {
2323 channel->setUBWCEnabled(true);
2324 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 newStream->max_buffers = channel->getNumBuffers();
2326 newStream->priv = channel;
2327 }
2328 break;
2329 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2330 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2331 mChannelHandle,
2332 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002333 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 this,
2335 newStream,
2336 (cam_stream_type_t)
2337 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2338 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2339 mMetadataChannel);
2340 if (channel == NULL) {
2341 LOGE("allocation of YUV channel failed");
2342 pthread_mutex_unlock(&mMutex);
2343 return -ENOMEM;
2344 }
2345 newStream->max_buffers = channel->getNumBuffers();
2346 newStream->priv = channel;
2347 break;
2348 }
2349 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2350 case HAL_PIXEL_FORMAT_RAW16:
2351 case HAL_PIXEL_FORMAT_RAW10:
2352 mRawChannel = new QCamera3RawChannel(
2353 mCameraHandle->camera_handle, mChannelHandle,
2354 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002355 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002356 this, newStream,
2357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2358 mMetadataChannel,
2359 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2360 if (mRawChannel == NULL) {
2361 LOGE("allocation of raw channel failed");
2362 pthread_mutex_unlock(&mMutex);
2363 return -ENOMEM;
2364 }
2365 newStream->max_buffers = mRawChannel->getNumBuffers();
2366 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2367 break;
2368 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002369 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2370 mDepthChannel = new QCamera3DepthChannel(
2371 mCameraHandle->camera_handle, mChannelHandle,
2372 mCameraHandle->ops, NULL, NULL, &padding_info,
2373 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2374 mMetadataChannel);
2375 if (NULL == mDepthChannel) {
2376 LOGE("Allocation of depth channel failed");
2377 pthread_mutex_unlock(&mMutex);
2378 return NO_MEMORY;
2379 }
2380 newStream->priv = mDepthChannel;
2381 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2382 } else {
2383 // Max live snapshot inflight buffer is 1. This is to mitigate
2384 // frame drop issues for video snapshot. The more buffers being
2385 // allocated, the more frame drops there are.
2386 mPictureChannel = new QCamera3PicChannel(
2387 mCameraHandle->camera_handle, mChannelHandle,
2388 mCameraHandle->ops, captureResultCb,
2389 setBufferErrorStatus, &padding_info, this, newStream,
2390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2391 m_bIs4KVideo, isZsl, mMetadataChannel,
2392 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2393 if (mPictureChannel == NULL) {
2394 LOGE("allocation of channel failed");
2395 pthread_mutex_unlock(&mMutex);
2396 return -ENOMEM;
2397 }
2398 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2399 newStream->max_buffers = mPictureChannel->getNumBuffers();
2400 mPictureChannel->overrideYuvSize(
2401 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 break;
2405
2406 default:
2407 LOGE("not a supported format 0x%x", newStream->format);
2408 break;
2409 }
2410 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2411 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2412 } else {
2413 LOGE("Error, Unknown stream type");
2414 pthread_mutex_unlock(&mMutex);
2415 return -EINVAL;
2416 }
2417
2418 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2419 if (channel != NULL && channel->isUBWCEnabled()) {
2420 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002421 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2422 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002423 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2424 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2425 }
2426 }
2427
2428 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2429 it != mStreamInfo.end(); it++) {
2430 if ((*it)->stream == newStream) {
2431 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2432 break;
2433 }
2434 }
2435 } else {
2436 // Channel already exists for this stream
2437 // Do nothing for now
2438 }
2439 padding_info = gCamCapability[mCameraId]->padding_info;
2440
Emilian Peev7650c122017-01-19 08:24:33 -08002441 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 * since there is no real stream associated with them
2443 */
Emilian Peev7650c122017-01-19 08:24:33 -08002444 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2445 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002446 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002447 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002448 }
2449
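    // Only the vendor RAW-only operation mode keeps onlyRaw set; in every other mode it is
    // cleared so that the analysis/support channels below are still created.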
Thierry Strudel2896d122017-02-23 19:18:03 -08002450 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2451 onlyRaw = false;
2452 }
2453
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002454 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002455 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002456 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002457 cam_analysis_info_t analysisInfo;
2458 int32_t ret = NO_ERROR;
2459 ret = mCommon.getAnalysisInfo(
2460 FALSE,
2461 analysisFeatureMask,
2462 &analysisInfo);
2463 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002464 cam_color_filter_arrangement_t analysis_color_arrangement =
2465 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2466 CAM_FILTER_ARRANGEMENT_Y :
2467 gCamCapability[mCameraId]->color_arrangement);
2468 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2469 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002470 cam_dimension_t analysisDim;
2471 analysisDim = mCommon.getMatchingDimension(previewSize,
2472 analysisInfo.analysis_recommended_res);
2473
2474 mAnalysisChannel = new QCamera3SupportChannel(
2475 mCameraHandle->camera_handle,
2476 mChannelHandle,
2477 mCameraHandle->ops,
2478 &analysisInfo.analysis_padding_info,
2479 analysisFeatureMask,
2480 CAM_STREAM_TYPE_ANALYSIS,
2481 &analysisDim,
2482 (analysisInfo.analysis_format
2483 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2484 : CAM_FORMAT_YUV_420_NV21),
2485 analysisInfo.hw_analysis_supported,
2486 gCamCapability[mCameraId]->color_arrangement,
2487 this,
2488 0); // force buffer count to 0
2489 } else {
2490 LOGW("getAnalysisInfo failed, ret = %d", ret);
2491 }
2492 if (!mAnalysisChannel) {
2493 LOGW("Analysis channel cannot be created");
2494 }
2495 }
2496
Thierry Strudel3d639192016-09-09 11:52:26 -07002497 //RAW DUMP channel
2498 if (mEnableRawDump && isRawStreamRequested == false){
2499 cam_dimension_t rawDumpSize;
2500 rawDumpSize = getMaxRawSize(mCameraId);
2501 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2502 setPAAFSupport(rawDumpFeatureMask,
2503 CAM_STREAM_TYPE_RAW,
2504 gCamCapability[mCameraId]->color_arrangement);
2505 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2506 mChannelHandle,
2507 mCameraHandle->ops,
2508 rawDumpSize,
2509 &padding_info,
2510 this, rawDumpFeatureMask);
2511 if (!mRawDumpChannel) {
2512 LOGE("Raw Dump channel cannot be created");
2513 pthread_mutex_unlock(&mMutex);
2514 return -ENOMEM;
2515 }
2516 }
2517
Chien-Yu Chenee335912017-02-09 17:53:20 -08002518 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002519 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002520 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002521 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2522 "HDR+ RAW source channel is not created.",
2523 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002524 } else {
2525 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2526 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2527 setPAAFSupport(hdrPlusRawFeatureMask,
2528 CAM_STREAM_TYPE_RAW,
2529 gCamCapability[mCameraId]->color_arrangement);
2530 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2531 mChannelHandle,
2532 mCameraHandle->ops,
2533 rawSize,
2534 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002535 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002536 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002537 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002538 if (!mHdrPlusRawSrcChannel) {
2539 LOGE("HDR+ Raw Source channel cannot be created");
2540 pthread_mutex_unlock(&mMutex);
2541 return -ENOMEM;
2542 }
2543 }
2544 }
2545
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 if (mAnalysisChannel) {
2547 cam_analysis_info_t analysisInfo;
2548 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2549 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2550 CAM_STREAM_TYPE_ANALYSIS;
2551 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2552 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002553 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2555 &analysisInfo);
2556 if (rc != NO_ERROR) {
2557 LOGE("getAnalysisInfo failed, ret = %d", rc);
2558 pthread_mutex_unlock(&mMutex);
2559 return rc;
2560 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002561 cam_color_filter_arrangement_t analysis_color_arrangement =
2562 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2563 CAM_FILTER_ARRANGEMENT_Y :
2564 gCamCapability[mCameraId]->color_arrangement);
2565 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2566 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2567 analysis_color_arrangement);
2568
Thierry Strudel3d639192016-09-09 11:52:26 -07002569 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002570 mCommon.getMatchingDimension(previewSize,
2571 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 mStreamConfigInfo.num_streams++;
2573 }
2574
Thierry Strudel2896d122017-02-23 19:18:03 -08002575 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002576 cam_analysis_info_t supportInfo;
2577 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2578 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2579 setPAAFSupport(callbackFeatureMask,
2580 CAM_STREAM_TYPE_CALLBACK,
2581 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002582 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002583 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002584 if (ret != NO_ERROR) {
2585 /* Ignore the error for Mono camera
2586 * because the PAAF bit mask is only set
2587 * for CAM_STREAM_TYPE_ANALYSIS stream type
2588 */
2589 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2590 LOGW("getAnalysisInfo failed, ret = %d", ret);
2591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002592 }
2593 mSupportChannel = new QCamera3SupportChannel(
2594 mCameraHandle->camera_handle,
2595 mChannelHandle,
2596 mCameraHandle->ops,
2597 &gCamCapability[mCameraId]->padding_info,
2598 callbackFeatureMask,
2599 CAM_STREAM_TYPE_CALLBACK,
2600 &QCamera3SupportChannel::kDim,
2601 CAM_FORMAT_YUV_420_NV21,
2602 supportInfo.hw_analysis_supported,
2603 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002604 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 if (!mSupportChannel) {
2606 LOGE("dummy channel cannot be created");
2607 pthread_mutex_unlock(&mMutex);
2608 return -ENOMEM;
2609 }
2610 }
2611
2612 if (mSupportChannel) {
2613 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2614 QCamera3SupportChannel::kDim;
2615 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2616 CAM_STREAM_TYPE_CALLBACK;
2617 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2618 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2619 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2620 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2621 gCamCapability[mCameraId]->color_arrangement);
2622 mStreamConfigInfo.num_streams++;
2623 }
2624
2625 if (mRawDumpChannel) {
2626 cam_dimension_t rawSize;
2627 rawSize = getMaxRawSize(mCameraId);
2628 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2629 rawSize;
2630 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2631 CAM_STREAM_TYPE_RAW;
2632 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2633 CAM_QCOM_FEATURE_NONE;
2634 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 gCamCapability[mCameraId]->color_arrangement);
2637 mStreamConfigInfo.num_streams++;
2638 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002639
2640 if (mHdrPlusRawSrcChannel) {
2641 cam_dimension_t rawSize;
2642 rawSize = getMaxRawSize(mCameraId);
2643 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2644 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2645 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2646 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2647 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2648 gCamCapability[mCameraId]->color_arrangement);
2649 mStreamConfigInfo.num_streams++;
2650 }
2651
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 /* In HFR mode, if video stream is not added, create a dummy channel so that
2653 * ISP can create a batch mode even for preview only case. This channel is
2654 * never 'start'ed (no stream-on), it is only 'initialized' */
2655 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2656 !m_bIsVideo) {
2657 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2658 setPAAFSupport(dummyFeatureMask,
2659 CAM_STREAM_TYPE_VIDEO,
2660 gCamCapability[mCameraId]->color_arrangement);
2661 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2662 mChannelHandle,
2663 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002664 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002665 this,
2666 &mDummyBatchStream,
2667 CAM_STREAM_TYPE_VIDEO,
2668 dummyFeatureMask,
2669 mMetadataChannel);
2670 if (NULL == mDummyBatchChannel) {
2671 LOGE("creation of mDummyBatchChannel failed."
2672 "Preview will use non-hfr sensor mode ");
2673 }
2674 }
2675 if (mDummyBatchChannel) {
2676 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2677 mDummyBatchStream.width;
2678 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2679 mDummyBatchStream.height;
2680 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2681 CAM_STREAM_TYPE_VIDEO;
2682 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2683 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2684 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2685 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2686 gCamCapability[mCameraId]->color_arrangement);
2687 mStreamConfigInfo.num_streams++;
2688 }
2689
2690 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2691 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002692 m_bIs4KVideo ? 0 :
2693 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002694
2695 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2696 for (pendingRequestIterator i = mPendingRequestsList.begin();
2697 i != mPendingRequestsList.end();) {
2698 i = erasePendingRequest(i);
2699 }
2700 mPendingFrameDropList.clear();
2701 // Initialize/Reset the pending buffers list
2702 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2703 req.mPendingBufferList.clear();
2704 }
2705 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2706
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 mCurJpegMeta.clear();
 2708    //Get min frame duration for this stream configuration
2709 deriveMinFrameDuration();
2710
Chien-Yu Chenee335912017-02-09 17:53:20 -08002711 mFirstPreviewIntentSeen = false;
2712
 2713    // Disable HDR+ if it's enabled.
2714 disableHdrPlusModeLocked();
2715
Thierry Strudel3d639192016-09-09 11:52:26 -07002716 // Update state
2717 mState = CONFIGURED;
2718
2719 pthread_mutex_unlock(&mMutex);
2720
2721 return rc;
2722}
2723
2724/*===========================================================================
2725 * FUNCTION : validateCaptureRequest
2726 *
2727 * DESCRIPTION: validate a capture request from camera service
2728 *
2729 * PARAMETERS :
2730 * @request : request from framework to process
2731 *
2732 * RETURN :
2733 *
2734 *==========================================================================*/
2735int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002736 camera3_capture_request_t *request,
2737 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002738{
2739 ssize_t idx = 0;
2740 const camera3_stream_buffer_t *b;
2741 CameraMetadata meta;
2742
2743 /* Sanity check the request */
2744 if (request == NULL) {
2745 LOGE("NULL capture request");
2746 return BAD_VALUE;
2747 }
2748
2749 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2750 /*settings cannot be null for the first request*/
2751 return BAD_VALUE;
2752 }
2753
2754 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002755 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2756 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002757        LOGE("Request %d: No output buffers provided!",
 2758                frameNumber);
2759 return BAD_VALUE;
2760 }
2761 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
 2762        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2763 request->num_output_buffers, MAX_NUM_STREAMS);
2764 return BAD_VALUE;
2765 }
2766 if (request->input_buffer != NULL) {
2767 b = request->input_buffer;
2768 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2769 LOGE("Request %d: Buffer %ld: Status not OK!",
2770 frameNumber, (long)idx);
2771 return BAD_VALUE;
2772 }
2773 if (b->release_fence != -1) {
2774 LOGE("Request %d: Buffer %ld: Has a release fence!",
2775 frameNumber, (long)idx);
2776 return BAD_VALUE;
2777 }
2778 if (b->buffer == NULL) {
2779 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2780 frameNumber, (long)idx);
2781 return BAD_VALUE;
2782 }
2783 }
2784
2785 // Validate all buffers
2786 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002787 if (b == NULL) {
2788 return BAD_VALUE;
2789 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002790 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 QCamera3ProcessingChannel *channel =
2792 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2793 if (channel == NULL) {
2794 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2795 frameNumber, (long)idx);
2796 return BAD_VALUE;
2797 }
2798 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2799 LOGE("Request %d: Buffer %ld: Status not OK!",
2800 frameNumber, (long)idx);
2801 return BAD_VALUE;
2802 }
2803 if (b->release_fence != -1) {
2804 LOGE("Request %d: Buffer %ld: Has a release fence!",
2805 frameNumber, (long)idx);
2806 return BAD_VALUE;
2807 }
2808 if (b->buffer == NULL) {
2809 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2810 frameNumber, (long)idx);
2811 return BAD_VALUE;
2812 }
2813 if (*(b->buffer) == NULL) {
2814 LOGE("Request %d: Buffer %ld: NULL private handle!",
2815 frameNumber, (long)idx);
2816 return BAD_VALUE;
2817 }
2818 idx++;
2819 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002821 return NO_ERROR;
2822}
2823
2824/*===========================================================================
2825 * FUNCTION : deriveMinFrameDuration
2826 *
 2827 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2828 * on currently configured streams.
2829 *
2830 * PARAMETERS : NONE
2831 *
2832 * RETURN : NONE
2833 *
2834 *==========================================================================*/
2835void QCamera3HardwareInterface::deriveMinFrameDuration()
2836{
2837 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2838
2839 maxJpegDim = 0;
2840 maxProcessedDim = 0;
2841 maxRawDim = 0;
2842
2843 // Figure out maximum jpeg, processed, and raw dimensions
2844 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2845 it != mStreamInfo.end(); it++) {
2846
2847 // Input stream doesn't have valid stream_type
2848 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2849 continue;
2850
2851 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2852 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2853 if (dimension > maxJpegDim)
2854 maxJpegDim = dimension;
2855 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2856 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2857 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2858 if (dimension > maxRawDim)
2859 maxRawDim = dimension;
2860 } else {
2861 if (dimension > maxProcessedDim)
2862 maxProcessedDim = dimension;
2863 }
2864 }
2865
2866 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2867 MAX_SIZES_CNT);
2868
2869 //Assume all jpeg dimensions are in processed dimensions.
2870 if (maxJpegDim > maxProcessedDim)
2871 maxProcessedDim = maxJpegDim;
 2872    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
2873 if (maxProcessedDim > maxRawDim) {
2874 maxRawDim = INT32_MAX;
2875
2876 for (size_t i = 0; i < count; i++) {
2877 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2878 gCamCapability[mCameraId]->raw_dim[i].height;
2879 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2880 maxRawDim = dimension;
2881 }
2882 }
2883
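    // Illustrative example: with a 12 MP JPEG stream and a 2 MP preview stream, maxProcessedDim
    // becomes the JPEG area, and maxRawDim is raised to the smallest supported raw size that
    // covers it, so the duration lookups below compare similar sensor areas.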
2884 //Find minimum durations for processed, jpeg, and raw
2885 for (size_t i = 0; i < count; i++) {
2886 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2887 gCamCapability[mCameraId]->raw_dim[i].height) {
2888 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2889 break;
2890 }
2891 }
2892 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2893 for (size_t i = 0; i < count; i++) {
2894 if (maxProcessedDim ==
2895 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2896 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2897 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2898 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2899 break;
2900 }
2901 }
2902}
2903
2904/*===========================================================================
2905 * FUNCTION : getMinFrameDuration
2906 *
 2907 * DESCRIPTION: get minimum frame duration based on the per-stream minimum frame durations
2908 * and current request configuration.
2909 *
 2910 * PARAMETERS : @request: request sent by the framework
2911 *
 2912 * RETURN : min frame duration for a particular request
2913 *
2914 *==========================================================================*/
2915int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2916{
2917 bool hasJpegStream = false;
2918 bool hasRawStream = false;
2919 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2920 const camera3_stream_t *stream = request->output_buffers[i].stream;
2921 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2922 hasJpegStream = true;
2923 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2924 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2925 stream->format == HAL_PIXEL_FORMAT_RAW16)
2926 hasRawStream = true;
2927 }
2928
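    // The effective minimum duration is the largest of the durations for the stream types this
    // request actually uses; e.g. a request without a JPEG buffer is not throttled by the JPEG
    // minimum duration.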
2929 if (!hasJpegStream)
2930 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2931 else
2932 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2933}
2934
2935/*===========================================================================
2936 * FUNCTION : handleBuffersDuringFlushLock
2937 *
2938 * DESCRIPTION: Account for buffers returned from back-end during flush
2939 * This function is executed while mMutex is held by the caller.
2940 *
2941 * PARAMETERS :
2942 * @buffer: image buffer for the callback
2943 *
2944 * RETURN :
2945 *==========================================================================*/
2946void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2947{
2948 bool buffer_found = false;
2949 for (List<PendingBuffersInRequest>::iterator req =
2950 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2951 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2952 for (List<PendingBufferInfo>::iterator i =
2953 req->mPendingBufferList.begin();
2954 i != req->mPendingBufferList.end(); i++) {
2955 if (i->buffer == buffer->buffer) {
2956 mPendingBuffersMap.numPendingBufsAtFlush--;
2957 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2958 buffer->buffer, req->frame_number,
2959 mPendingBuffersMap.numPendingBufsAtFlush);
2960 buffer_found = true;
2961 break;
2962 }
2963 }
2964 if (buffer_found) {
2965 break;
2966 }
2967 }
2968 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2969 //signal the flush()
2970 LOGD("All buffers returned to HAL. Continue flush");
2971 pthread_cond_signal(&mBuffersCond);
2972 }
2973}
2974
Thierry Strudel3d639192016-09-09 11:52:26 -07002975/*===========================================================================
2976 * FUNCTION : handleBatchMetadata
2977 *
2978 * DESCRIPTION: Handles metadata buffer callback in batch mode
2979 *
2980 * PARAMETERS : @metadata_buf: metadata buffer
2981 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2982 * the meta buf in this method
2983 *
2984 * RETURN :
2985 *
2986 *==========================================================================*/
2987void QCamera3HardwareInterface::handleBatchMetadata(
2988 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2989{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002990 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002991
2992 if (NULL == metadata_buf) {
2993 LOGE("metadata_buf is NULL");
2994 return;
2995 }
 2996    /* In batch mode, the metadata will contain the frame number and timestamp of
2997 * the last frame in the batch. Eg: a batch containing buffers from request
2998 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
 2999     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3000 * multiple process_capture_results */
3001 metadata_buffer_t *metadata =
3002 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3003 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3004 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3005 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3006 uint32_t frame_number = 0, urgent_frame_number = 0;
3007 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3008 bool invalid_metadata = false;
3009 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3010 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003011 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003012
3013 int32_t *p_frame_number_valid =
3014 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3015 uint32_t *p_frame_number =
3016 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3017 int64_t *p_capture_time =
3018 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3019 int32_t *p_urgent_frame_number_valid =
3020 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3021 uint32_t *p_urgent_frame_number =
3022 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3023
3024 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3025 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3026 (NULL == p_urgent_frame_number)) {
3027 LOGE("Invalid metadata");
3028 invalid_metadata = true;
3029 } else {
3030 frame_number_valid = *p_frame_number_valid;
3031 last_frame_number = *p_frame_number;
3032 last_frame_capture_time = *p_capture_time;
3033 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3034 last_urgent_frame_number = *p_urgent_frame_number;
3035 }
3036
 3037    /* In batch mode, when no video buffers are requested, set_parms are sent
3038 * for every capture_request. The difference between consecutive urgent
3039 * frame numbers and frame numbers should be used to interpolate the
3040 * corresponding frame numbers and time stamps */
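    /* Illustrative example: if this batch's metadata reports last_frame_number = 8 and the
     * batch started at frame 5, frameNumDiff = 8 + 1 - 5 = 4, so the loop below emits
     * results for frames 5, 6, 7 and 8 from this single metadata buffer. */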
3041 pthread_mutex_lock(&mMutex);
3042 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003043 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3044 if(idx < 0) {
3045 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3046 last_urgent_frame_number);
3047 mState = ERROR;
3048 pthread_mutex_unlock(&mMutex);
3049 return;
3050 }
3051 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003052 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3053 first_urgent_frame_number;
3054
3055 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3056 urgent_frame_number_valid,
3057 first_urgent_frame_number, last_urgent_frame_number);
3058 }
3059
3060 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003061 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3062 if(idx < 0) {
3063 LOGE("Invalid frame number received: %d. Irrecoverable error",
3064 last_frame_number);
3065 mState = ERROR;
3066 pthread_mutex_unlock(&mMutex);
3067 return;
3068 }
3069 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003070 frameNumDiff = last_frame_number + 1 -
3071 first_frame_number;
3072 mPendingBatchMap.removeItem(last_frame_number);
3073
3074 LOGD("frm: valid: %d frm_num: %d - %d",
3075 frame_number_valid,
3076 first_frame_number, last_frame_number);
3077
3078 }
3079 pthread_mutex_unlock(&mMutex);
3080
3081 if (urgent_frame_number_valid || frame_number_valid) {
3082 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3083 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3084 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3085 urgentFrameNumDiff, last_urgent_frame_number);
3086 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3087 LOGE("frameNumDiff: %d frameNum: %d",
3088 frameNumDiff, last_frame_number);
3089 }
3090
3091 for (size_t i = 0; i < loopCount; i++) {
3092 /* handleMetadataWithLock is called even for invalid_metadata for
3093 * pipeline depth calculation */
3094 if (!invalid_metadata) {
3095 /* Infer frame number. Batch metadata contains frame number of the
3096 * last frame */
3097 if (urgent_frame_number_valid) {
3098 if (i < urgentFrameNumDiff) {
3099 urgent_frame_number =
3100 first_urgent_frame_number + i;
3101 LOGD("inferred urgent frame_number: %d",
3102 urgent_frame_number);
3103 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3104 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3105 } else {
3106 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3107 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3108 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3109 }
3110 }
3111
3112 /* Infer frame number. Batch metadata contains frame number of the
3113 * last frame */
3114 if (frame_number_valid) {
3115 if (i < frameNumDiff) {
3116 frame_number = first_frame_number + i;
3117 LOGD("inferred frame_number: %d", frame_number);
3118 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3119 CAM_INTF_META_FRAME_NUMBER, frame_number);
3120 } else {
3121 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3122 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3123 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3124 }
3125 }
3126
3127 if (last_frame_capture_time) {
3128 //Infer timestamp
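                // Illustrative example: at mHFRVideoFps = 120, consecutive frames in the batch
                // are spaced NSEC_PER_SEC / 120 (~8.3 ms) apart, counted back from the last
                // frame's capture time reported by the sensor.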
3129 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003130 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003131 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003132 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003133 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3134 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3135 LOGD("batch capture_time: %lld, capture_time: %lld",
3136 last_frame_capture_time, capture_time);
3137 }
3138 }
3139 pthread_mutex_lock(&mMutex);
3140 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003141 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003142 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3143 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003144                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003145 pthread_mutex_unlock(&mMutex);
3146 }
3147
3148 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003149 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003150 mMetadataChannel->bufDone(metadata_buf);
3151 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003152 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003153 }
3154}
3155
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003156void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3157 camera3_error_msg_code_t errorCode)
3158{
3159 camera3_notify_msg_t notify_msg;
3160 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3161 notify_msg.type = CAMERA3_MSG_ERROR;
3162 notify_msg.message.error.error_code = errorCode;
3163 notify_msg.message.error.error_stream = NULL;
3164 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003165 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003166
3167 return;
3168}
Thierry Strudel3d639192016-09-09 11:52:26 -07003169/*===========================================================================
3170 * FUNCTION : handleMetadataWithLock
3171 *
3172 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3173 *
3174 * PARAMETERS : @metadata_buf: metadata buffer
3175 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3176 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003177 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3178 * last urgent metadata in a batch. Always true for non-batch mode
3179 * @lastMetadataInBatch: Boolean to indicate whether this is the
3180 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003181 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3182 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003183 *
3184 * RETURN :
3185 *
3186 *==========================================================================*/
3187void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003188 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003189 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3190 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003191{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003192 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003193 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3194 //during flush do not send metadata from this thread
3195 LOGD("not sending metadata during flush or when mState is error");
3196 if (free_and_bufdone_meta_buf) {
3197 mMetadataChannel->bufDone(metadata_buf);
3198 free(metadata_buf);
3199 }
3200 return;
3201 }
3202
3203 //not in flush
3204 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3205 int32_t frame_number_valid, urgent_frame_number_valid;
3206 uint32_t frame_number, urgent_frame_number;
3207 int64_t capture_time;
3208 nsecs_t currentSysTime;
3209
3210 int32_t *p_frame_number_valid =
3211 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3212 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3213 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3214 int32_t *p_urgent_frame_number_valid =
3215 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3216 uint32_t *p_urgent_frame_number =
3217 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3218 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3219 metadata) {
3220 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3221 *p_frame_number_valid, *p_frame_number);
3222 }
3223
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003224 camera_metadata_t *resultMetadata = nullptr;
3225
Thierry Strudel3d639192016-09-09 11:52:26 -07003226 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3227 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3228 LOGE("Invalid metadata");
3229 if (free_and_bufdone_meta_buf) {
3230 mMetadataChannel->bufDone(metadata_buf);
3231 free(metadata_buf);
3232 }
3233 goto done_metadata;
3234 }
3235 frame_number_valid = *p_frame_number_valid;
3236 frame_number = *p_frame_number;
3237 capture_time = *p_capture_time;
3238 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3239 urgent_frame_number = *p_urgent_frame_number;
3240 currentSysTime = systemTime(CLOCK_MONOTONIC);
3241
3242 // Detect if buffers from any requests are overdue
3243 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003244 int64_t timeout;
3245 {
3246 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3247 // If there is a pending HDR+ request, the following requests may be blocked until the
3248 // HDR+ request is done. So allow a longer timeout.
3249 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3250 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3251 }
3252
3253 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003254 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003255 assert(missed.stream->priv);
3256 if (missed.stream->priv) {
3257 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3258 assert(ch->mStreams[0]);
3259 if (ch->mStreams[0]) {
3260 LOGE("Cancel missing frame = %d, buffer = %p,"
3261 "stream type = %d, stream format = %d",
3262 req.frame_number, missed.buffer,
3263 ch->mStreams[0]->getMyType(), missed.stream->format);
3264 ch->timeoutFrame(req.frame_number);
3265 }
3266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003267 }
3268 }
3269 }
3270 //Partial result on process_capture_result for timestamp
3271 if (urgent_frame_number_valid) {
3272 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3273 urgent_frame_number, capture_time);
3274
 3275        //Received an urgent Frame Number, handle it
3276 //using partial results
3277 for (pendingRequestIterator i =
3278 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3279 LOGD("Iterator Frame = %d urgent frame = %d",
3280 i->frame_number, urgent_frame_number);
3281
3282 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3283 (i->partial_result_cnt == 0)) {
3284 LOGE("Error: HAL missed urgent metadata for frame number %d",
3285 i->frame_number);
3286 }
3287
3288 if (i->frame_number == urgent_frame_number &&
3289 i->bUrgentReceived == 0) {
3290
3291 camera3_capture_result_t result;
3292 memset(&result, 0, sizeof(camera3_capture_result_t));
3293
3294 i->partial_result_cnt++;
3295 i->bUrgentReceived = 1;
3296 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003297 result.result = translateCbUrgentMetadataToResultMetadata(
3298 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003299 // Populate metadata result
3300 result.frame_number = urgent_frame_number;
3301 result.num_output_buffers = 0;
3302 result.output_buffers = NULL;
3303 result.partial_result = i->partial_result_cnt;
3304
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003305 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003306 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003307 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003308 result.partial_result == PARTIAL_RESULT_COUNT);
3309 }
3310
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003311 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003312 LOGD("urgent frame_number = %u, capture_time = %lld",
3313 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003314 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3315 // Instant AEC settled for this frame.
3316 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3317 mInstantAECSettledFrameNumber = urgent_frame_number;
3318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003319 free_camera_metadata((camera_metadata_t *)result.result);
3320 break;
3321 }
3322 }
3323 }
3324
3325 if (!frame_number_valid) {
3326 LOGD("Not a valid normal frame number, used as SOF only");
3327 if (free_and_bufdone_meta_buf) {
3328 mMetadataChannel->bufDone(metadata_buf);
3329 free(metadata_buf);
3330 }
3331 goto done_metadata;
3332 }
3333 LOGH("valid frame_number = %u, capture_time = %lld",
3334 frame_number, capture_time);
3335
Emilian Peev7650c122017-01-19 08:24:33 -08003336 if (metadata->is_depth_data_valid) {
3337 handleDepthDataLocked(metadata->depth_data, frame_number);
3338 }
3339
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003340 // Check whether any stream buffer corresponding to this is dropped or not
3341 // If dropped, then send the ERROR_BUFFER for the corresponding stream
 3342    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
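    // Illustrative example: if instant AEC settles at frame 12, buffers for requests issued
    // before frame 12 get a CAMERA3_MSG_ERROR_BUFFER notification here and are later returned
    // to the framework with an error status via mPendingFrameDropList.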
3343 for (auto & pendingRequest : mPendingRequestsList) {
3344 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3345 mInstantAECSettledFrameNumber)) {
3346 camera3_notify_msg_t notify_msg = {};
3347 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003348 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003349 QCamera3ProcessingChannel *channel =
3350 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003351 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003352 if (p_cam_frame_drop) {
3353 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003354 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003355 // Got the stream ID for drop frame.
3356 dropFrame = true;
3357 break;
3358 }
3359 }
3360 } else {
3361 // This is instant AEC case.
 3362                // For instant AEC, drop the stream until AEC is settled.
3363 dropFrame = true;
3364 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003365
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003366 if (dropFrame) {
3367 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3368 if (p_cam_frame_drop) {
3369 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003370 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003371 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 } else {
3373 // For instant AEC, inform frame drop and frame number
3374 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3375 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003376 pendingRequest.frame_number, streamID,
3377 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003378 }
3379 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003380 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003381 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003382 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003383 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003384 if (p_cam_frame_drop) {
3385 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003386 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003388 } else {
3389 // For instant AEC, inform frame drop and frame number
3390 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3391 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003392 pendingRequest.frame_number, streamID,
3393 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003394 }
3395 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003396 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003397 PendingFrameDrop.stream_ID = streamID;
3398 // Add the Frame drop info to mPendingFrameDropList
3399 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003401 }
3402 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003403 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003404
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003405 for (auto & pendingRequest : mPendingRequestsList) {
3406 // Find the pending request with the frame number.
3407 if (pendingRequest.frame_number == frame_number) {
3408 // Update the sensor timestamp.
3409 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003410
Thierry Strudel3d639192016-09-09 11:52:26 -07003411
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003412 /* Set the timestamp in display metadata so that clients aware of
 3413               private_handle such as VT can use this un-modified timestamp.
3414 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003415 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003416
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 // Find channel requiring metadata, meaning internal offline postprocess
3418 // is needed.
3419 //TODO: for now, we don't support two streams requiring metadata at the same time.
 3420            // (because we are not making copies, and the metadata buffer is not reference counted.)
3421 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003422 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3423 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003424 if (iter->need_metadata) {
3425 internalPproc = true;
3426 QCamera3ProcessingChannel *channel =
3427 (QCamera3ProcessingChannel *)iter->stream->priv;
3428 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003429 if(p_is_metabuf_queued != NULL) {
3430 *p_is_metabuf_queued = true;
3431 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003432 break;
3433 }
3434 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003435 for (auto itr = pendingRequest.internalRequestList.begin();
3436 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003437 if (itr->need_metadata) {
3438 internalPproc = true;
3439 QCamera3ProcessingChannel *channel =
3440 (QCamera3ProcessingChannel *)itr->stream->priv;
3441 channel->queueReprocMetadata(metadata_buf);
3442 break;
3443 }
3444 }
3445
Thierry Strudel54dc9782017-02-15 12:12:10 -08003446 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003447 resultMetadata = translateFromHalMetadata(metadata,
3448 pendingRequest.timestamp, pendingRequest.request_id,
3449 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3450 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003451 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003452 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003453 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003454 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003455 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003456 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003457
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003458 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003459
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003460 if (pendingRequest.blob_request) {
3461 //Dump tuning metadata if enabled and available
3462 char prop[PROPERTY_VALUE_MAX];
3463 memset(prop, 0, sizeof(prop));
3464 property_get("persist.camera.dumpmetadata", prop, "0");
3465 int32_t enabled = atoi(prop);
3466 if (enabled && metadata->is_tuning_params_valid) {
3467 dumpMetadataToFile(metadata->tuning_params,
3468 mMetaFrameCount,
3469 enabled,
3470 "Snapshot",
3471 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 }
3473 }
3474
3475 if (!internalPproc) {
3476 LOGD("couldn't find need_metadata for this metadata");
3477 // Return metadata buffer
3478 if (free_and_bufdone_meta_buf) {
3479 mMetadataChannel->bufDone(metadata_buf);
3480 free(metadata_buf);
3481 }
3482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003483
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003484 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 }
3486 }
3487
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003488 // Try to send out shutter callbacks and capture results.
3489 handlePendingResultsWithLock(frame_number, resultMetadata);
3490 return;
3491
Thierry Strudel3d639192016-09-09 11:52:26 -07003492done_metadata:
3493 for (pendingRequestIterator i = mPendingRequestsList.begin();
3494 i != mPendingRequestsList.end() ;i++) {
3495 i->pipeline_depth++;
3496 }
3497 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3498 unblockRequestIfNecessary();
3499}
3500
3501/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003502 * FUNCTION   : handleDepthDataLocked
3503 *
3504 * DESCRIPTION: Handles incoming depth data
3505 *
3506 * PARAMETERS : @depthData : Depth data
3507 * @frameNumber: Frame number of the incoming depth data
3508 *
3509 * RETURN :
3510 *
3511 *==========================================================================*/
3512void QCamera3HardwareInterface::handleDepthDataLocked(
3513 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3514 uint32_t currentFrameNumber;
3515 buffer_handle_t *depthBuffer;
3516
3517 if (nullptr == mDepthChannel) {
3518 LOGE("Depth channel not present!");
3519 return;
3520 }
3521
3522 camera3_stream_buffer_t resultBuffer =
3523 {.acquire_fence = -1,
3524 .release_fence = -1,
3525 .status = CAMERA3_BUFFER_STATUS_OK,
3526 .buffer = nullptr,
3527 .stream = mDepthChannel->getStream()};
3528 camera3_capture_result_t result =
3529 {.result = nullptr,
3530 .num_output_buffers = 1,
3531 .output_buffers = &resultBuffer,
3532 .partial_result = 0,
3533 .frame_number = 0};
3534
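    // Walk the queued depth buffers oldest-first: frames older than the one carrying this depth
    // data are returned as buffer errors, the matching frame is populated with the depth data,
    // and newer frames stay queued for a later callback.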
3535 do {
3536 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3537 if (nullptr == depthBuffer) {
3538 break;
3539 }
3540
3541 result.frame_number = currentFrameNumber;
3542 resultBuffer.buffer = depthBuffer;
3543 if (currentFrameNumber == frameNumber) {
3544 int32_t rc = mDepthChannel->populateDepthData(depthData,
3545 frameNumber);
3546 if (NO_ERROR != rc) {
3547 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3548 } else {
3549 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3550 }
3551 } else if (currentFrameNumber > frameNumber) {
3552 break;
3553 } else {
3554 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3555 {{currentFrameNumber, mDepthChannel->getStream(),
3556 CAMERA3_MSG_ERROR_BUFFER}}};
3557 orchestrateNotify(&notify_msg);
3558
 3559            LOGE("Depth buffer for frame number: %d is missing, "
3560 "returning back!", currentFrameNumber);
3561 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3562 }
3563 mDepthChannel->unmapBuffer(currentFrameNumber);
3564
3565 orchestrateResult(&result);
3566 } while (currentFrameNumber < frameNumber);
3567}
3568
3569/*===========================================================================
3570 * FUNCTION : notifyErrorFoPendingDepthData
3571 *
3572 * DESCRIPTION: Returns error for any pending depth buffers
3573 *
3574 * PARAMETERS : depthCh - depth channel that needs to get flushed
3575 *
3576 * RETURN :
3577 *
3578 *==========================================================================*/
3579void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3580 QCamera3DepthChannel *depthCh) {
3581 uint32_t currentFrameNumber;
3582 buffer_handle_t *depthBuffer;
3583
3584 if (nullptr == depthCh) {
3585 return;
3586 }
3587
3588 camera3_notify_msg_t notify_msg =
3589 {.type = CAMERA3_MSG_ERROR,
3590 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3591 camera3_stream_buffer_t resultBuffer =
3592 {.acquire_fence = -1,
3593 .release_fence = -1,
3594 .buffer = nullptr,
3595 .stream = depthCh->getStream(),
3596 .status = CAMERA3_BUFFER_STATUS_ERROR};
3597 camera3_capture_result_t result =
3598 {.result = nullptr,
3599 .frame_number = 0,
3600 .num_output_buffers = 1,
3601 .partial_result = 0,
3602 .output_buffers = &resultBuffer};
3603
3604 while (nullptr !=
3605 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3606 depthCh->unmapBuffer(currentFrameNumber);
3607
3608 notify_msg.message.error.frame_number = currentFrameNumber;
3609 orchestrateNotify(&notify_msg);
3610
3611 resultBuffer.buffer = depthBuffer;
3612 result.frame_number = currentFrameNumber;
3613 orchestrateResult(&result);
3614 };
3615}
3616
3617/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 * FUNCTION : hdrPlusPerfLock
3619 *
3620 * DESCRIPTION: perf lock for HDR+ using custom intent
3621 *
3622 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3623 *
3624 * RETURN : None
3625 *
3626 *==========================================================================*/
3627void QCamera3HardwareInterface::hdrPlusPerfLock(
3628 mm_camera_super_buf_t *metadata_buf)
3629{
3630 if (NULL == metadata_buf) {
3631 LOGE("metadata_buf is NULL");
3632 return;
3633 }
3634 metadata_buffer_t *metadata =
3635 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3636 int32_t *p_frame_number_valid =
3637 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3638 uint32_t *p_frame_number =
3639 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3640
3641 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3642 LOGE("%s: Invalid metadata", __func__);
3643 return;
3644 }
3645
3646 //acquire perf lock for 5 sec after the last HDR frame is captured
3647 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3648 if ((p_frame_number != NULL) &&
3649 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003650 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003651 }
3652 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003653}
3654
3655/*===========================================================================
3656 * FUNCTION : handleInputBufferWithLock
3657 *
3658 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3659 *
3660 * PARAMETERS : @frame_number: frame number of the input buffer
3661 *
3662 * RETURN :
3663 *
3664 *==========================================================================*/
3665void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3666{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003667 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003668 pendingRequestIterator i = mPendingRequestsList.begin();
3669 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3670 i++;
3671 }
3672 if (i != mPendingRequestsList.end() && i->input_buffer) {
3673 //found the right request
3674 if (!i->shutter_notified) {
3675 CameraMetadata settings;
3676 camera3_notify_msg_t notify_msg;
3677 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3678 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3679 if(i->settings) {
3680 settings = i->settings;
3681 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3682 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3683 } else {
3684 LOGE("No timestamp in input settings! Using current one.");
3685 }
3686 } else {
3687 LOGE("Input settings missing!");
3688 }
3689
3690 notify_msg.type = CAMERA3_MSG_SHUTTER;
3691 notify_msg.message.shutter.frame_number = frame_number;
3692 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003693 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003694 i->shutter_notified = true;
3695 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3696 i->frame_number, notify_msg.message.shutter.timestamp);
3697 }
3698
3699 if (i->input_buffer->release_fence != -1) {
3700 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3701 close(i->input_buffer->release_fence);
3702 if (rc != OK) {
3703 LOGE("input buffer sync wait failed %d", rc);
3704 }
3705 }
3706
3707 camera3_capture_result result;
3708 memset(&result, 0, sizeof(camera3_capture_result));
3709 result.frame_number = frame_number;
3710 result.result = i->settings;
3711 result.input_buffer = i->input_buffer;
3712 result.partial_result = PARTIAL_RESULT_COUNT;
3713
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003714 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003715 LOGD("Input request metadata and input buffer frame_number = %u",
3716 i->frame_number);
3717 i = erasePendingRequest(i);
3718 } else {
3719 LOGE("Could not find input request for frame number %d", frame_number);
3720 }
3721}
3722
3723/*===========================================================================
3724 * FUNCTION : handleBufferWithLock
3725 *
3726 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3727 *
3728 * PARAMETERS : @buffer: image buffer for the callback
3729 * @frame_number: frame number of the image buffer
3730 *
3731 * RETURN :
3732 *
3733 *==========================================================================*/
3734void QCamera3HardwareInterface::handleBufferWithLock(
3735 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3736{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003737 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003738
3739 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3740 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3741 }
3742
Thierry Strudel3d639192016-09-09 11:52:26 -07003743 /* Nothing to be done during error state */
3744 if ((ERROR == mState) || (DEINIT == mState)) {
3745 return;
3746 }
3747 if (mFlushPerf) {
3748 handleBuffersDuringFlushLock(buffer);
3749 return;
3750 }
3751 //not in flush
3752 // If the frame number doesn't exist in the pending request list,
3753 // directly send the buffer to the frameworks, and update pending buffers map
3754 // Otherwise, book-keep the buffer.
3755 pendingRequestIterator i = mPendingRequestsList.begin();
3756 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3757 i++;
3758 }
3759 if (i == mPendingRequestsList.end()) {
3760 // Verify all pending requests frame_numbers are greater
3761 for (pendingRequestIterator j = mPendingRequestsList.begin();
3762 j != mPendingRequestsList.end(); j++) {
3763 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3764 LOGW("Error: pending live frame number %d is smaller than %d",
3765 j->frame_number, frame_number);
3766 }
3767 }
3768 camera3_capture_result_t result;
3769 memset(&result, 0, sizeof(camera3_capture_result_t));
3770 result.result = NULL;
3771 result.frame_number = frame_number;
3772 result.num_output_buffers = 1;
3773 result.partial_result = 0;
3774 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3775 m != mPendingFrameDropList.end(); m++) {
3776 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3777 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3778 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3779 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3780 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3781 frame_number, streamID);
3782 m = mPendingFrameDropList.erase(m);
3783 break;
3784 }
3785 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003786 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 result.output_buffers = buffer;
3788 LOGH("result frame_number = %d, buffer = %p",
3789 frame_number, buffer->buffer);
3790
3791 mPendingBuffersMap.removeBuf(buffer->buffer);
3792
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003793 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003794 } else {
3795 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003796 if (i->input_buffer->release_fence != -1) {
3797 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3798 close(i->input_buffer->release_fence);
3799 if (rc != OK) {
3800 LOGE("input buffer sync wait failed %d", rc);
3801 }
3802 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003804
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 // Put buffer into the pending request
3806 for (auto &requestedBuffer : i->buffers) {
3807 if (requestedBuffer.stream == buffer->stream) {
3808 if (requestedBuffer.buffer != nullptr) {
3809 LOGE("Error: buffer is already set");
3810 } else {
3811 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3812 sizeof(camera3_stream_buffer_t));
3813 *(requestedBuffer.buffer) = *buffer;
3814 LOGH("cache buffer %p at result frame_number %u",
3815 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003816 }
3817 }
3818 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819
3820 if (i->input_buffer) {
3821 // For a reprocessing request, try to send out shutter callback and result metadata.
3822 handlePendingResultsWithLock(frame_number, nullptr);
3823 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003824 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003825
3826 if (mPreviewStarted == false) {
3827 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3828 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3829 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3830 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3831 mPreviewStarted = true;
3832
3833 // Set power hint for preview
3834 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3835 }
3836 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003837}
3838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3840 const camera_metadata_t *resultMetadata)
3841{
3842 // Find the pending request for this result metadata.
3843 auto requestIter = mPendingRequestsList.begin();
3844 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3845 requestIter++;
3846 }
3847
3848 if (requestIter == mPendingRequestsList.end()) {
3849 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3850 return;
3851 }
3852
3853 // Update the result metadata
3854 requestIter->resultMetadata = resultMetadata;
3855
3856 // Check what type of request this is.
3857 bool liveRequest = false;
3858 if (requestIter->hdrplus) {
3859 // HDR+ request doesn't have partial results.
3860 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3861 } else if (requestIter->input_buffer != nullptr) {
3862 // Reprocessing request result is the same as settings.
3863 requestIter->resultMetadata = requestIter->settings;
3864 // Reprocessing request doesn't have partial results.
3865 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3866 } else {
3867 liveRequest = true;
3868 requestIter->partial_result_cnt++;
3869 mPendingLiveRequest--;
3870
3871 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003872 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3873 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003874 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3875 }
3876 }
3877
3878 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3879 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
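    // Illustrative example: if frames 10 and 11 are pending and 11's metadata arrives first,
    // 11 is held back until 10 either becomes ready or, for a live request, is reported as an
    // error below, so shutters and results reach the framework in frame-number order.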
3880 bool readyToSend = true;
3881
3882 // Iterate through the pending requests to send out shutter callbacks and results that are
3883 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3884 // live requests that don't have result metadata yet.
3885 auto iter = mPendingRequestsList.begin();
3886 while (iter != mPendingRequestsList.end()) {
3887 // Check if current pending request is ready. If it's not ready, the following pending
3888 // requests are also not ready.
3889 if (readyToSend && iter->resultMetadata == nullptr) {
3890 readyToSend = false;
3891 }
3892
3893 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3894
3895 std::vector<camera3_stream_buffer_t> outputBuffers;
3896
3897 camera3_capture_result_t result = {};
3898 result.frame_number = iter->frame_number;
3899 result.result = iter->resultMetadata;
3900 result.partial_result = iter->partial_result_cnt;
3901
3902 // If this pending buffer has result metadata, we may be able to send out shutter callback
3903 // and result metadata.
3904 if (iter->resultMetadata != nullptr) {
3905 if (!readyToSend) {
3906 // If any of the previous pending request is not ready, this pending request is
3907 // also not ready to send in order to keep shutter callbacks and result metadata
3908 // in order.
3909 iter++;
3910 continue;
3911 }
3912
3913 // Invoke shutter callback if not yet.
3914 if (!iter->shutter_notified) {
3915 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3916
3917 // Find the timestamp in HDR+ result metadata
3918 camera_metadata_ro_entry_t entry;
3919 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3920 ANDROID_SENSOR_TIMESTAMP, &entry);
3921 if (res != OK) {
3922 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3923 __FUNCTION__, iter->frame_number, strerror(-res), res);
3924 } else {
3925 timestamp = entry.data.i64[0];
3926 }
3927
3928 camera3_notify_msg_t notify_msg = {};
3929 notify_msg.type = CAMERA3_MSG_SHUTTER;
3930 notify_msg.message.shutter.frame_number = iter->frame_number;
3931 notify_msg.message.shutter.timestamp = timestamp;
3932 orchestrateNotify(&notify_msg);
3933 iter->shutter_notified = true;
3934 }
3935
3936 result.input_buffer = iter->input_buffer;
3937
3938 // Prepare output buffer array
3939 for (auto bufferInfoIter = iter->buffers.begin();
3940 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3941 if (bufferInfoIter->buffer != nullptr) {
3942
3943 QCamera3Channel *channel =
3944 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3945 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3946
3947 // Check if this buffer is a dropped frame.
3948 auto frameDropIter = mPendingFrameDropList.begin();
3949 while (frameDropIter != mPendingFrameDropList.end()) {
3950 if((frameDropIter->stream_ID == streamID) &&
3951 (frameDropIter->frame_number == frameNumber)) {
3952 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3953 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3954 streamID);
3955 mPendingFrameDropList.erase(frameDropIter);
3956 break;
3957 } else {
3958 frameDropIter++;
3959 }
3960 }
3961
3962 // Check buffer error status
3963 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3964 bufferInfoIter->buffer->buffer);
3965 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3966
3967 outputBuffers.push_back(*(bufferInfoIter->buffer));
3968 free(bufferInfoIter->buffer);
3969 bufferInfoIter->buffer = NULL;
3970 }
3971 }
3972
3973 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3974 result.num_output_buffers = outputBuffers.size();
3975 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3976 // If the result metadata belongs to a live request, notify errors for previous pending
3977 // live requests.
3978 mPendingLiveRequest--;
3979
3980 CameraMetadata dummyMetadata;
3981 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3982 result.result = dummyMetadata.release();
3983
3984 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3985 } else {
3986 iter++;
3987 continue;
3988 }
3989
3990 orchestrateResult(&result);
3991
3992 // For reprocessing, result metadata is the same as settings so do not free it here to
3993 // avoid double free.
3994 if (result.result != iter->settings) {
3995 free_camera_metadata((camera_metadata_t *)result.result);
3996 }
3997 iter->resultMetadata = nullptr;
3998 iter = erasePendingRequest(iter);
3999 }
4000
4001 if (liveRequest) {
4002 for (auto &iter : mPendingRequestsList) {
4003 // Increment pipeline depth for the following pending requests.
4004 if (iter.frame_number > frameNumber) {
4005 iter.pipeline_depth++;
4006 }
4007 }
4008 }
4009
4010 unblockRequestIfNecessary();
4011}
4012
Thierry Strudel3d639192016-09-09 11:52:26 -07004013/*===========================================================================
4014 * FUNCTION : unblockRequestIfNecessary
4015 *
4016 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4017 * that mMutex is held when this function is called.
4018 *
4019 * PARAMETERS :
4020 *
4021 * RETURN :
4022 *
4023 *==========================================================================*/
4024void QCamera3HardwareInterface::unblockRequestIfNecessary()
4025{
4026 // Unblock process_capture_request
4027 pthread_cond_signal(&mRequestCond);
4028}
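// Note: a plain signal (rather than a broadcast) is assumed to be sufficient
// here because process_capture_request is the only waiter on mRequestCond and
// it re-checks its wait condition under mMutex after waking.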
4029
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004030/*===========================================================================
4031 * FUNCTION : isHdrSnapshotRequest
4032 *
4033 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4034 *
4035 * PARAMETERS : camera3 request structure
4036 *
4037 * RETURN : true if the request is an HDR snapshot request, false otherwise
4038 *
4039 *==========================================================================*/
4040bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4041{
4042 if (request == NULL) {
4043 LOGE("Invalid request handle");
4044 assert(0);
4045 return false;
4046 }
4047
4048 if (!mForceHdrSnapshot) {
4049 CameraMetadata frame_settings;
4050 frame_settings = request->settings;
4051
4052 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4053 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4054 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4055 return false;
4056 }
4057 } else {
4058 return false;
4059 }
4060
4061 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4062 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4063 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4064 return false;
4065 }
4066 } else {
4067 return false;
4068 }
4069 }
4070
4071 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4072 if (request->output_buffers[i].stream->format
4073 == HAL_PIXEL_FORMAT_BLOB) {
4074 return true;
4075 }
4076 }
4077
4078 return false;
4079}
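// Note: a request qualifies as an HDR snapshot only when HDR scene mode is set
// (or mForceHdrSnapshot is enabled) AND at least one BLOB output buffer is
// attached; orchestrateRequest() below uses this to decide whether to expand
// the request into a bracketed capture sequence.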
4080/*===========================================================================
4081 * FUNCTION : orchestrateRequest
4082 *
4083 * DESCRIPTION: Orchestrates a capture request from camera service
4084 *
4085 * PARAMETERS :
4086 * @request : request from framework to process
4087 *
4088 * RETURN : Error status codes
4089 *
4090 *==========================================================================*/
4091int32_t QCamera3HardwareInterface::orchestrateRequest(
4092 camera3_capture_request_t *request)
4093{
4094
4095 uint32_t originalFrameNumber = request->frame_number;
4096 uint32_t originalOutputCount = request->num_output_buffers;
4097 const camera_metadata_t *original_settings = request->settings;
4098 List<InternalRequest> internallyRequestedStreams;
4099 List<InternalRequest> emptyInternalList;
4100
4101 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4102 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4103 uint32_t internalFrameNumber;
4104 CameraMetadata modified_meta;
4105
4106
4107 /* Add Blob channel to list of internally requested streams */
4108 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4109 if (request->output_buffers[i].stream->format
4110 == HAL_PIXEL_FORMAT_BLOB) {
4111 InternalRequest streamRequested;
4112 streamRequested.meteringOnly = 1;
4113 streamRequested.need_metadata = 0;
4114 streamRequested.stream = request->output_buffers[i].stream;
4115 internallyRequestedStreams.push_back(streamRequested);
4116 }
4117 }
4118 request->num_output_buffers = 0;
4119 auto itr = internallyRequestedStreams.begin();
4120
4121 /* Modify setting to set compensation */
4122 modified_meta = request->settings;
4123 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4124 uint8_t aeLock = 1;
4125 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4126 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4127 camera_metadata_t *modified_settings = modified_meta.release();
4128 request->settings = modified_settings;
4129
4130 /* Capture Settling & -2x frame */
4131 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4132 request->frame_number = internalFrameNumber;
4133 processCaptureRequest(request, internallyRequestedStreams);
4134
4135 request->num_output_buffers = originalOutputCount;
4136 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4137 request->frame_number = internalFrameNumber;
4138 processCaptureRequest(request, emptyInternalList);
4139 request->num_output_buffers = 0;
4140
4141 modified_meta = modified_settings;
4142 expCompensation = 0;
4143 aeLock = 1;
4144 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4145 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4146 modified_settings = modified_meta.release();
4147 request->settings = modified_settings;
4148
4149 /* Capture Settling & 0X frame */
4150
4151 itr = internallyRequestedStreams.begin();
4152 if (itr == internallyRequestedStreams.end()) {
4153 LOGE("Error Internally Requested Stream list is empty");
4154 assert(0);
4155 } else {
4156 itr->need_metadata = 0;
4157 itr->meteringOnly = 1;
4158 }
4159
4160 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4161 request->frame_number = internalFrameNumber;
4162 processCaptureRequest(request, internallyRequestedStreams);
4163
4164 itr = internallyRequestedStreams.begin();
4165 if (itr == internallyRequestedStreams.end()) {
4166 ALOGE("Error Internally Requested Stream list is empty");
4167 assert(0);
4168 } else {
4169 itr->need_metadata = 1;
4170 itr->meteringOnly = 0;
4171 }
4172
4173 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4174 request->frame_number = internalFrameNumber;
4175 processCaptureRequest(request, internallyRequestedStreams);
4176
4177 /* Capture 2X frame*/
4178 modified_meta = modified_settings;
4179 expCompensation = GB_HDR_2X_STEP_EV;
4180 aeLock = 1;
4181 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4182 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4183 modified_settings = modified_meta.release();
4184 request->settings = modified_settings;
4185
4186 itr = internallyRequestedStreams.begin();
4187 if (itr == internallyRequestedStreams.end()) {
4188 ALOGE("Error Internally Requested Stream list is empty");
4189 assert(0);
4190 } else {
4191 itr->need_metadata = 0;
4192 itr->meteringOnly = 1;
4193 }
4194 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4195 request->frame_number = internalFrameNumber;
4196 processCaptureRequest(request, internallyRequestedStreams);
4197
4198 itr = internallyRequestedStreams.begin();
4199 if (itr == internallyRequestedStreams.end()) {
4200 ALOGE("Error Internally Requested Stream list is empty");
4201 assert(0);
4202 } else {
4203 itr->need_metadata = 1;
4204 itr->meteringOnly = 0;
4205 }
4206
4207 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4208 request->frame_number = internalFrameNumber;
4209 processCaptureRequest(request, internallyRequestedStreams);
4210
4211
4212 /* Capture 2X on original streaming config*/
4213 internallyRequestedStreams.clear();
4214
4215 /* Restore original settings pointer */
4216 request->settings = original_settings;
4217 } else {
4218 uint32_t internalFrameNumber;
4219 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4220 request->frame_number = internalFrameNumber;
4221 return processCaptureRequest(request, internallyRequestedStreams);
4222 }
4223
4224 return NO_ERROR;
4225}
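/*
 * Illustrative sketch of the HDR snapshot expansion performed above (comment
 * only, not compiled; internal frame numbers shown are hypothetical). The
 * exposure-compensation values and meteringOnly/need_metadata flags are the
 * ones set in the code:
 *
 *   framework frame N
 *     -> internal n+0: EV = GB_HDR_HALF_STEP_EV, blob stream, metering only
 *     -> internal n+1: EV = GB_HDR_HALF_STEP_EV, original output buffers
 *                      (the only internal frame mapped back to N)
 *     -> internal n+2: EV = 0,                  blob stream, metering only
 *     -> internal n+3: EV = 0,                  blob stream, with metadata
 *     -> internal n+4: EV = GB_HDR_2X_STEP_EV,  blob stream, metering only
 *     -> internal n+5: EV = GB_HDR_2X_STEP_EV,  blob stream, with metadata
 *
 * All other internal frames are registered with EMPTY_FRAMEWORK_FRAME_NUMBER,
 * so their results and notifies are dropped in orchestrateResult() and
 * orchestrateNotify().
 */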
4226
4227/*===========================================================================
4228 * FUNCTION : orchestrateResult
4229 *
4230 * DESCRIPTION: Orchestrates a capture result to camera service
4231 *
4232 * PARAMETERS :
4233 * @result : capture result to send to the framework
4234 *
4235 * RETURN :
4236 *
4237 *==========================================================================*/
4238void QCamera3HardwareInterface::orchestrateResult(
4239 camera3_capture_result_t *result)
4240{
4241 uint32_t frameworkFrameNumber;
4242 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4243 frameworkFrameNumber);
4244 if (rc != NO_ERROR) {
4245 LOGE("Cannot find translated frameworkFrameNumber");
4246 assert(0);
4247 } else {
4248 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004249 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004250 } else {
4251 result->frame_number = frameworkFrameNumber;
4252 mCallbackOps->process_capture_result(mCallbackOps, result);
4253 }
4254 }
4255}
4256
4257/*===========================================================================
4258 * FUNCTION : orchestrateNotify
4259 *
4260 * DESCRIPTION: Orchestrates a notify to camera service
4261 *
4262 * PARAMETERS :
4263 * @notify_msg : notify message to send to the framework
4264 *
4265 * RETURN :
4266 *
4267 *==========================================================================*/
4268void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4269{
4270 uint32_t frameworkFrameNumber;
4271 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004272 int32_t rc = NO_ERROR;
4273
4274 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004275 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004276
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004277 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004278 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4279 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4280 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004281 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004282 LOGE("Cannot find translated frameworkFrameNumber");
4283 assert(0);
4284 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004285 }
4286 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004287
4288 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4289 LOGD("Internal Request drop the notifyCb");
4290 } else {
4291 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4292 mCallbackOps->notify(mCallbackOps, notify_msg);
4293 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004294}
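// Both orchestrateResult() and orchestrateNotify() translate HAL-internal frame
// numbers back to framework frame numbers via _orchestrationDb. Callbacks for
// purely internal frames (EMPTY_FRAMEWORK_FRAME_NUMBER) are dropped, and a
// CAMERA3_MSG_ERROR_DEVICE notify is still forwarded with frame number 0 even
// when no translation exists.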
4295
4296/*===========================================================================
4297 * FUNCTION : FrameNumberRegistry
4298 *
4299 * DESCRIPTION: Constructor
4300 *
4301 * PARAMETERS :
4302 *
4303 * RETURN :
4304 *
4305 *==========================================================================*/
4306FrameNumberRegistry::FrameNumberRegistry()
4307{
4308 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4309}
4310
4311/*===========================================================================
4312 * FUNCTION : ~FrameNumberRegistry
4313 *
4314 * DESCRIPTION: Destructor
4315 *
4316 * PARAMETERS :
4317 *
4318 * RETURN :
4319 *
4320 *==========================================================================*/
4321FrameNumberRegistry::~FrameNumberRegistry()
4322{
4323}
4324
4325/*===========================================================================
4326 * FUNCTION : PurgeOldEntriesLocked
4327 *
4328 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4329 *
4330 * PARAMETERS :
4331 *
4332 * RETURN : NONE
4333 *
4334 *==========================================================================*/
4335void FrameNumberRegistry::purgeOldEntriesLocked()
4336{
4337 while (_register.begin() != _register.end()) {
4338 auto itr = _register.begin();
4339 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4340 _register.erase(itr);
4341 } else {
4342 return;
4343 }
4344 }
4345}
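// The registry keeps only a sliding window of the most recent
// FRAME_REGISTER_LRU_SIZE internal frame numbers; entries older than
// (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE) are assumed to have
// completed and are discarded.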
4346
4347/*===========================================================================
4348 * FUNCTION : allocStoreInternalFrameNumber
4349 *
4350 * DESCRIPTION: Method to record a framework request and associate a newly
4351 * generated internal frame number with it
4352 *
4353 * PARAMETERS :
4354 * @frameworkFrameNumber: Identifier given by the framework
4355 * @internalFrameNumber : Output parameter that receives the newly generated
4356 * internal frame number
4357 *
4358 * RETURN : Error code
4359 *
4360 *==========================================================================*/
4361int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4362 uint32_t &internalFrameNumber)
4363{
4364 Mutex::Autolock lock(mRegistryLock);
4365 internalFrameNumber = _nextFreeInternalNumber++;
4366 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4367 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4368 purgeOldEntriesLocked();
4369 return NO_ERROR;
4370}
4371
4372/*===========================================================================
4373 * FUNCTION : generateStoreInternalFrameNumber
4374 *
4375 * DESCRIPTION: Method to generate a new internal frame number that is not
4376 * associated with any framework request
4377 *
4378 * PARAMETERS :
4379 * @internalFrameNumber: Output parameter that receives the newly generated
4380 * internal frame number
4381 *
4382 * RETURN : Error code
4383 *
4384 *==========================================================================*/
4385int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4386{
4387 Mutex::Autolock lock(mRegistryLock);
4388 internalFrameNumber = _nextFreeInternalNumber++;
4389 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4390 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4391 purgeOldEntriesLocked();
4392 return NO_ERROR;
4393}
4394
4395/*===========================================================================
4396 * FUNCTION : getFrameworkFrameNumber
4397 *
4398 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4399 *
4400 * PARAMETERS :
4401 * @internalFrameNumber: Internal reference
4402 * @frameworkFrameNumber: Output parameter holding the framework frame number
4403 *
4404 * RETURN : Error code
4405 *
4406 *==========================================================================*/
4407int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4408 uint32_t &frameworkFrameNumber)
4409{
4410 Mutex::Autolock lock(mRegistryLock);
4411 auto itr = _register.find(internalFrameNumber);
4412 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004413 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004414 return -ENOENT;
4415 }
4416
4417 frameworkFrameNumber = itr->second;
4418 purgeOldEntriesLocked();
4419 return NO_ERROR;
4420}
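/*
 * Minimal usage sketch for FrameNumberRegistry (comment only, not compiled);
 * all names other than the registry methods are hypothetical:
 *
 *   FrameNumberRegistry db;
 *   uint32_t internalFN, internalOnlyFN, fwkFN;
 *
 *   // Framework-visible request: remember the mapping.
 *   db.allocStoreInternalFrameNumber(fwkFrameNumber, internalFN);
 *
 *   // HAL-internal request with no framework counterpart.
 *   db.generateStoreInternalFrameNumber(internalOnlyFN);
 *
 *   // When a result/notify comes back carrying an internal frame number:
 *   if (db.getFrameworkFrameNumber(internalFN, fwkFN) == NO_ERROR &&
 *           fwkFN != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // Safe to report to the framework as frame fwkFN.
 *   }
 */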
Thierry Strudel3d639192016-09-09 11:52:26 -07004421
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004422status_t QCamera3HardwareInterface::fillPbStreamConfig(
4423 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4424 QCamera3Channel *channel, uint32_t streamIndex) {
4425 if (config == nullptr) {
4426 LOGE("%s: config is null", __FUNCTION__);
4427 return BAD_VALUE;
4428 }
4429
4430 if (channel == nullptr) {
4431 LOGE("%s: channel is null", __FUNCTION__);
4432 return BAD_VALUE;
4433 }
4434
4435 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4436 if (stream == nullptr) {
4437 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4438 return NAME_NOT_FOUND;
4439 }
4440
4441 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4442 if (streamInfo == nullptr) {
4443 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4444 return NAME_NOT_FOUND;
4445 }
4446
4447 config->id = pbStreamId;
4448 config->image.width = streamInfo->dim.width;
4449 config->image.height = streamInfo->dim.height;
4450 config->image.padding = 0;
4451 config->image.format = pbStreamFormat;
4452
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004453 uint32_t totalPlaneSize = 0;
4454
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004455 // Fill plane information.
4456 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4457 pbcamera::PlaneConfiguration plane;
4458 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4459 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4460 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004461
4462 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004463 }
4464
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004465 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004466 return OK;
4467}
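// Note on the padding computed above: it is the tail padding of the whole
// frame, i.e. frame_len minus the sum of (stride * scanline) over all planes.
// For a hypothetical two-plane stream with frame_len = 3137536 and planes of
// 1920x1088 and 1920x544 (stride x scanline), the padding would be
// 3137536 - (2088960 + 1044480) = 4096 bytes.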
4468
Thierry Strudel3d639192016-09-09 11:52:26 -07004469/*===========================================================================
4470 * FUNCTION : processCaptureRequest
4471 *
4472 * DESCRIPTION: process a capture request from camera service
4473 *
4474 * PARAMETERS :
4475 * @request : request from framework to process
4476 *
4477 * RETURN :
4478 *
4479 *==========================================================================*/
4480int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004481 camera3_capture_request_t *request,
4482 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004483{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004484 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004485 int rc = NO_ERROR;
4486 int32_t request_id;
4487 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004488 bool isVidBufRequested = false;
4489 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004490 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004491
4492 pthread_mutex_lock(&mMutex);
4493
4494 // Validate current state
4495 switch (mState) {
4496 case CONFIGURED:
4497 case STARTED:
4498 /* valid state */
4499 break;
4500
4501 case ERROR:
4502 pthread_mutex_unlock(&mMutex);
4503 handleCameraDeviceError();
4504 return -ENODEV;
4505
4506 default:
4507 LOGE("Invalid state %d", mState);
4508 pthread_mutex_unlock(&mMutex);
4509 return -ENODEV;
4510 }
4511
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004512 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004513 if (rc != NO_ERROR) {
4514 LOGE("incoming request is not valid");
4515 pthread_mutex_unlock(&mMutex);
4516 return rc;
4517 }
4518
4519 meta = request->settings;
4520
4521 // For first capture request, send capture intent, and
4522 // stream on all streams
4523 if (mState == CONFIGURED) {
4524 // send an unconfigure to the backend so that the isp
4525 // resources are deallocated
4526 if (!mFirstConfiguration) {
4527 cam_stream_size_info_t stream_config_info;
4528 int32_t hal_version = CAM_HAL_V3;
4529 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4530 stream_config_info.buffer_info.min_buffers =
4531 MIN_INFLIGHT_REQUESTS;
4532 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004533 m_bIs4KVideo ? 0 :
4534 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004535 clear_metadata_buffer(mParameters);
4536 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4537 CAM_INTF_PARM_HAL_VERSION, hal_version);
4538 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4539 CAM_INTF_META_STREAM_INFO, stream_config_info);
4540 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4541 mParameters);
4542 if (rc < 0) {
4543 LOGE("set_parms for unconfigure failed");
4544 pthread_mutex_unlock(&mMutex);
4545 return rc;
4546 }
4547 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004548 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004549 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004550 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004551 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004552 property_get("persist.camera.is_type", is_type_value, "4");
4553 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4554 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4555 property_get("persist.camera.is_type_preview", is_type_value, "4");
4556 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4557 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
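            // Note: the setprop values are cast directly to cam_is_type_t, so they must
            // match that enum's numeric values; per the comment above, the default of "4"
            // is assumed to correspond to IS_TYPE_EIS_2_0.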
Thierry Strudel3d639192016-09-09 11:52:26 -07004558
4559 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4560 int32_t hal_version = CAM_HAL_V3;
4561 uint8_t captureIntent =
4562 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4563 mCaptureIntent = captureIntent;
4564 clear_metadata_buffer(mParameters);
4565 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4566 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4567 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004568 if (mFirstConfiguration) {
4569 // configure instant AEC
4570 // Instant AEC is a session based parameter and it is needed only
4571 // once per complete session after open camera.
4572 // i.e. This is set only once for the first capture request, after open camera.
4573 setInstantAEC(meta);
4574 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004575 uint8_t fwkVideoStabMode=0;
4576 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4577 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4578 }
4579
4580 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4581 // turn it on for video/preview
4582 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4583 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004584 int32_t vsMode;
4585 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4587 rc = BAD_VALUE;
4588 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004589 LOGD("setEis %d", setEis);
4590 bool eis3Supported = false;
4591 size_t count = IS_TYPE_MAX;
4592 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4593 for (size_t i = 0; i < count; i++) {
4594 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4595 eis3Supported = true;
4596 break;
4597 }
4598 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004599
4600 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004601 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004602 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4603 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004604 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4605 is_type = isTypePreview;
4606 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4607 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4608 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004609 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004610 } else {
4611 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004612 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004613 } else {
4614 is_type = IS_TYPE_NONE;
4615 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004616 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004617 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004618 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4619 }
4620 }
4621
4622 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4623 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4624
Thierry Strudel54dc9782017-02-15 12:12:10 -08004625 //Disable tintless only if the property is set to 0
4626 memset(prop, 0, sizeof(prop));
4627 property_get("persist.camera.tintless.enable", prop, "1");
4628 int32_t tintless_value = atoi(prop);
4629
Thierry Strudel3d639192016-09-09 11:52:26 -07004630 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4631 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004632
Thierry Strudel3d639192016-09-09 11:52:26 -07004633 //Disable CDS for HFR mode or if DIS/EIS is on.
4634 //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4635 //after every configure_stream
4636 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4637 (m_bIsVideo)) {
4638 int32_t cds = CAM_CDS_MODE_OFF;
4639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4640 CAM_INTF_PARM_CDS_MODE, cds))
4641 LOGE("Failed to disable CDS for HFR mode");
4642
4643 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004644
4645 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4646 uint8_t* use_av_timer = NULL;
4647
4648 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004649 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004650 use_av_timer = &m_debug_avtimer;
4651 }
4652 else{
4653 use_av_timer =
4654 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004655 if (use_av_timer) {
4656 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4657 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004658 }
4659
4660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4661 rc = BAD_VALUE;
4662 }
4663 }
4664
Thierry Strudel3d639192016-09-09 11:52:26 -07004665 setMobicat();
4666
4667 /* Set fps and hfr mode while sending meta stream info so that sensor
4668 * can configure appropriate streaming mode */
4669 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004670 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4671 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004672 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4673 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004674 if (rc == NO_ERROR) {
4675 int32_t max_fps =
4676 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004677 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004678 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4679 }
4680 /* For HFR, more buffers are dequeued upfront to improve the performance */
4681 if (mBatchSize) {
4682 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4683 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4684 }
4685 }
4686 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004687 LOGE("setHalFpsRange failed");
4688 }
4689 }
4690 if (meta.exists(ANDROID_CONTROL_MODE)) {
4691 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4692 rc = extractSceneMode(meta, metaMode, mParameters);
4693 if (rc != NO_ERROR) {
4694 LOGE("extractSceneMode failed");
4695 }
4696 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004697 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004698
Thierry Strudel04e026f2016-10-10 11:27:36 -07004699 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4700 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4701 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4702 rc = setVideoHdrMode(mParameters, vhdr);
4703 if (rc != NO_ERROR) {
4704 LOGE("setVideoHDR is failed");
4705 }
4706 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004707
Thierry Strudel3d639192016-09-09 11:52:26 -07004708 //TODO: validate the arguments, HSV scenemode should have only the
4709 //advertised fps ranges
4710
4711 /*set the capture intent, hal version, tintless, stream info,
4712 *and DIS enable parameters in the backend*/
4713 LOGD("set_parms META_STREAM_INFO " );
4714 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004715 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4716 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004717 mStreamConfigInfo.type[i],
4718 mStreamConfigInfo.stream_sizes[i].width,
4719 mStreamConfigInfo.stream_sizes[i].height,
4720 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004721 mStreamConfigInfo.format[i],
4722 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004723 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004724
Thierry Strudel3d639192016-09-09 11:52:26 -07004725 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4726 mParameters);
4727 if (rc < 0) {
4728 LOGE("set_parms failed for hal version, stream info");
4729 }
4730
Chien-Yu Chenee335912017-02-09 17:53:20 -08004731 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4732 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004733 if (rc != NO_ERROR) {
4734 LOGE("Failed to get sensor output size");
4735 pthread_mutex_unlock(&mMutex);
4736 goto error_exit;
4737 }
4738
4739 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4740 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004741 mSensorModeInfo.active_array_size.width,
4742 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004743
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004744 if (gHdrPlusClient != nullptr) {
4745 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4746 if (rc != OK) {
4747 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4748 mCameraId, mSensorModeInfo.op_pixel_clk);
4749 pthread_mutex_unlock(&mMutex);
4750 goto error_exit;
4751 }
4752 }
4753
Thierry Strudel3d639192016-09-09 11:52:26 -07004754 /* Set batch mode before initializing channels. Since registerBuffer
4755 * internally initializes some of the channels, it is better to set batch mode
4756 * even before the first registerBuffer call */
4757 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4758 it != mStreamInfo.end(); it++) {
4759 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4760 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4761 && mBatchSize) {
4762 rc = channel->setBatchSize(mBatchSize);
4763 //Disable per frame map unmap for HFR/batchmode case
4764 rc |= channel->setPerFrameMapUnmap(false);
4765 if (NO_ERROR != rc) {
4766 LOGE("Channel init failed %d", rc);
4767 pthread_mutex_unlock(&mMutex);
4768 goto error_exit;
4769 }
4770 }
4771 }
4772
4773 //First initialize all streams
4774 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4775 it != mStreamInfo.end(); it++) {
4776 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4777 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4778 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004779 setEis) {
4780 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4781 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4782 is_type = mStreamConfigInfo.is_type[i];
4783 break;
4784 }
4785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004786 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004787 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004788 rc = channel->initialize(IS_TYPE_NONE);
4789 }
4790 if (NO_ERROR != rc) {
4791 LOGE("Channel initialization failed %d", rc);
4792 pthread_mutex_unlock(&mMutex);
4793 goto error_exit;
4794 }
4795 }
4796
4797 if (mRawDumpChannel) {
4798 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4799 if (rc != NO_ERROR) {
4800 LOGE("Error: Raw Dump Channel init failed");
4801 pthread_mutex_unlock(&mMutex);
4802 goto error_exit;
4803 }
4804 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004805 if (mHdrPlusRawSrcChannel) {
4806 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4807 if (rc != NO_ERROR) {
4808 LOGE("Error: HDR+ RAW Source Channel init failed");
4809 pthread_mutex_unlock(&mMutex);
4810 goto error_exit;
4811 }
4812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 if (mSupportChannel) {
4814 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4815 if (rc < 0) {
4816 LOGE("Support channel initialization failed");
4817 pthread_mutex_unlock(&mMutex);
4818 goto error_exit;
4819 }
4820 }
4821 if (mAnalysisChannel) {
4822 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4823 if (rc < 0) {
4824 LOGE("Analysis channel initialization failed");
4825 pthread_mutex_unlock(&mMutex);
4826 goto error_exit;
4827 }
4828 }
4829 if (mDummyBatchChannel) {
4830 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4831 if (rc < 0) {
4832 LOGE("mDummyBatchChannel setBatchSize failed");
4833 pthread_mutex_unlock(&mMutex);
4834 goto error_exit;
4835 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004836 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 if (rc < 0) {
4838 LOGE("mDummyBatchChannel initialization failed");
4839 pthread_mutex_unlock(&mMutex);
4840 goto error_exit;
4841 }
4842 }
4843
4844 // Set bundle info
4845 rc = setBundleInfo();
4846 if (rc < 0) {
4847 LOGE("setBundleInfo failed %d", rc);
4848 pthread_mutex_unlock(&mMutex);
4849 goto error_exit;
4850 }
4851
4852 //update settings from app here
4853 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4854 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4855 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4856 }
4857 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4858 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4859 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4860 }
4861 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4862 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4863 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4864
4865 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4866 (mLinkedCameraId != mCameraId) ) {
4867 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4868 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004870 goto error_exit;
4871 }
4872 }
4873
4874 // add bundle related cameras
4875 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4876 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004877 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4878 &m_pDualCamCmdPtr->bundle_info;
4879 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 if (mIsDeviceLinked)
4881 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4882 else
4883 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4884
4885 pthread_mutex_lock(&gCamLock);
4886
4887 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4888 LOGE("Dualcam: Invalid Session Id ");
4889 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004890 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 goto error_exit;
4892 }
4893
4894 if (mIsMainCamera == 1) {
4895 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4896 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004897 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004898 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 // related session id should be session id of linked session
4900 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4901 } else {
4902 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4903 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004904 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004905 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4907 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004908 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 pthread_mutex_unlock(&gCamLock);
4910
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004911 rc = mCameraHandle->ops->set_dual_cam_cmd(
4912 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 if (rc < 0) {
4914 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004916 goto error_exit;
4917 }
4918 }
4919
4920 //Then start them.
4921 LOGH("Start META Channel");
4922 rc = mMetadataChannel->start();
4923 if (rc < 0) {
4924 LOGE("META channel start failed");
4925 pthread_mutex_unlock(&mMutex);
4926 goto error_exit;
4927 }
4928
4929 if (mAnalysisChannel) {
4930 rc = mAnalysisChannel->start();
4931 if (rc < 0) {
4932 LOGE("Analysis channel start failed");
4933 mMetadataChannel->stop();
4934 pthread_mutex_unlock(&mMutex);
4935 goto error_exit;
4936 }
4937 }
4938
4939 if (mSupportChannel) {
4940 rc = mSupportChannel->start();
4941 if (rc < 0) {
4942 LOGE("Support channel start failed");
4943 mMetadataChannel->stop();
4944 /* Although support and analysis are mutually exclusive today
4945 adding it in any case for future-proofing */
4946 if (mAnalysisChannel) {
4947 mAnalysisChannel->stop();
4948 }
4949 pthread_mutex_unlock(&mMutex);
4950 goto error_exit;
4951 }
4952 }
4953 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4954 it != mStreamInfo.end(); it++) {
4955 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4956 LOGH("Start Processing Channel mask=%d",
4957 channel->getStreamTypeMask());
4958 rc = channel->start();
4959 if (rc < 0) {
4960 LOGE("channel start failed");
4961 pthread_mutex_unlock(&mMutex);
4962 goto error_exit;
4963 }
4964 }
4965
4966 if (mRawDumpChannel) {
4967 LOGD("Starting raw dump stream");
4968 rc = mRawDumpChannel->start();
4969 if (rc != NO_ERROR) {
4970 LOGE("Error Starting Raw Dump Channel");
4971 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4972 it != mStreamInfo.end(); it++) {
4973 QCamera3Channel *channel =
4974 (QCamera3Channel *)(*it)->stream->priv;
4975 LOGH("Stopping Processing Channel mask=%d",
4976 channel->getStreamTypeMask());
4977 channel->stop();
4978 }
4979 if (mSupportChannel)
4980 mSupportChannel->stop();
4981 if (mAnalysisChannel) {
4982 mAnalysisChannel->stop();
4983 }
4984 mMetadataChannel->stop();
4985 pthread_mutex_unlock(&mMutex);
4986 goto error_exit;
4987 }
4988 }
4989
4990 if (mChannelHandle) {
4991
4992 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4993 mChannelHandle);
4994 if (rc != NO_ERROR) {
4995 LOGE("start_channel failed %d", rc);
4996 pthread_mutex_unlock(&mMutex);
4997 goto error_exit;
4998 }
4999 }
5000
5001 goto no_error;
5002error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005003 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 return rc;
5005no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 mWokenUpByDaemon = false;
5007 mPendingLiveRequest = 0;
5008 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 }
5010
Chien-Yu Chenee335912017-02-09 17:53:20 -08005011 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005012 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005013 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5014 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5015 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5016 rc = enableHdrPlusModeLocked();
5017 if (rc != OK) {
5018 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5019 pthread_mutex_unlock(&mMutex);
5020 return rc;
5021 }
5022
5023 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5024 if (mHdrPlusRawSrcChannel) {
5025 rc = mHdrPlusRawSrcChannel->start();
5026 if (rc != OK) {
5027 LOGE("Error Starting HDR+ RAW Channel");
5028 pthread_mutex_unlock(&mMutex);
5029 return rc;
5030 }
5031 }
5032 mFirstPreviewIntentSeen = true;
5033 }
5034
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005036 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005037
5038 if (mFlushPerf) {
5039 //we cannot accept any requests during flush
5040 LOGE("process_capture_request cannot proceed during flush");
5041 pthread_mutex_unlock(&mMutex);
5042 return NO_ERROR; //should return an error
5043 }
5044
5045 if (meta.exists(ANDROID_REQUEST_ID)) {
5046 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5047 mCurrentRequestId = request_id;
5048 LOGD("Received request with id: %d", request_id);
5049 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5050 LOGE("Unable to find request id field, \
5051 & no previous id available");
5052 pthread_mutex_unlock(&mMutex);
5053 return NAME_NOT_FOUND;
5054 } else {
5055 LOGD("Re-using old request id");
5056 request_id = mCurrentRequestId;
5057 }
5058
5059 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5060 request->num_output_buffers,
5061 request->input_buffer,
5062 frameNumber);
5063 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005064 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005065 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005066 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005067 uint32_t snapshotStreamId = 0;
5068 for (size_t i = 0; i < request->num_output_buffers; i++) {
5069 const camera3_stream_buffer_t& output = request->output_buffers[i];
5070 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5071
Emilian Peev7650c122017-01-19 08:24:33 -08005072 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5073 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005074 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005075 blob_request = 1;
5076 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5077 }
5078
5079 if (output.acquire_fence != -1) {
5080 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5081 close(output.acquire_fence);
5082 if (rc != OK) {
5083 LOGE("sync wait failed %d", rc);
5084 pthread_mutex_unlock(&mMutex);
5085 return rc;
5086 }
5087 }
5088
Emilian Peev7650c122017-01-19 08:24:33 -08005089 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5090 depthRequestPresent = true;
5091 continue;
5092 }
5093
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005094 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005096
5097 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5098 isVidBufRequested = true;
5099 }
5100 }
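    // At this point streamsArray holds one entry per requested output stream (depth
    // buffers are handled separately via mDepthChannel), all acquire fences have
    // been waited on, and blob_request/snapshotStreamId record whether a JPEG
    // capture was requested.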
5101
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005102 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5103 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5104 itr++) {
5105 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5106 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5107 channel->getStreamID(channel->getStreamTypeMask());
5108
5109 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5110 isVidBufRequested = true;
5111 }
5112 }
5113
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005115 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005116 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005117 }
5118 if (blob_request && mRawDumpChannel) {
5119 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
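        // buf_index is left as CAM_FREERUN_IDX (rather than a specific buffer index)
        // for the raw dump stream, which is assumed to manage its own free-running
        // buffers internally.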
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005120 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005121 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005122 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 }
5124
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005125 {
5126 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5127 // Request a RAW buffer if
5128 // 1. mHdrPlusRawSrcChannel is valid.
5129 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5130 // 3. There is no pending HDR+ request.
5131 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5132 mHdrPlusPendingRequests.size() == 0) {
5133 streamsArray.stream_request[streamsArray.num_streams].streamID =
5134 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5135 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5136 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005137 }
5138
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005139 //extract capture intent
5140 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5141 mCaptureIntent =
5142 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5143 }
5144
5145 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5146 mCacMode =
5147 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5148 }
5149
5150 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005151 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005152
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005153 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005154 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005155 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5156 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005157 }
5158
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005159 if (hdrPlusRequest) {
5160 // For a HDR+ request, just set the frame parameters.
5161 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5162 if (rc < 0) {
5163 LOGE("fail to set frame parameters");
5164 pthread_mutex_unlock(&mMutex);
5165 return rc;
5166 }
5167 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 /* Parse the settings:
5169 * - For every request in NORMAL MODE
5170 * - For every request in HFR mode during preview only case
5171 * - For first request of every batch in HFR mode during video
5172 * recording. In batch mode the same settings (except the frame number)
5173 * are repeated in each request of the batch.
5174 */
5175 if (!mBatchSize ||
5176 (mBatchSize && !isVidBufRequested) ||
5177 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005178 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 if (rc < 0) {
5180 LOGE("fail to set frame parameters");
5181 pthread_mutex_unlock(&mMutex);
5182 return rc;
5183 }
5184 }
5185 /* For batchMode HFR, setFrameParameters is not called for every
5186 * request; only the frame number of the latest request is parsed.
5187 * Keep track of the first and last frame numbers in a batch so that
5188 * metadata for all frame numbers of the batch can be duplicated in
5189 * handleBatchMetadata */
5190 if (mBatchSize) {
5191 if (!mToBeQueuedVidBufs) {
5192 //start of the batch
5193 mFirstFrameNumberInBatch = request->frame_number;
5194 }
5195 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5196 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5197 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005198 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 return BAD_VALUE;
5200 }
5201 }
5202 if (mNeedSensorRestart) {
5203 /* Unlock the mutex as restartSensor waits on the channels to be
5204 * stopped, which in turn calls stream callback functions -
5205 * handleBufferWithLock and handleMetadataWithLock */
5206 pthread_mutex_unlock(&mMutex);
5207 rc = dynamicUpdateMetaStreamInfo();
5208 if (rc != NO_ERROR) {
5209 LOGE("Restarting the sensor failed");
5210 return BAD_VALUE;
5211 }
5212 mNeedSensorRestart = false;
5213 pthread_mutex_lock(&mMutex);
5214 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005215 if(mResetInstantAEC) {
5216 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5217 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5218 mResetInstantAEC = false;
5219 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005220 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 if (request->input_buffer->acquire_fence != -1) {
5222 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5223 close(request->input_buffer->acquire_fence);
5224 if (rc != OK) {
5225 LOGE("input buffer sync wait failed %d", rc);
5226 pthread_mutex_unlock(&mMutex);
5227 return rc;
5228 }
5229 }
5230 }
5231
5232 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5233 mLastCustIntentFrmNum = frameNumber;
5234 }
5235 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005236 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 pendingRequestIterator latestRequest;
5238 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005239 pendingRequest.num_buffers = depthRequestPresent ?
5240 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 pendingRequest.request_id = request_id;
5242 pendingRequest.blob_request = blob_request;
5243 pendingRequest.timestamp = 0;
5244 pendingRequest.bUrgentReceived = 0;
5245 if (request->input_buffer) {
5246 pendingRequest.input_buffer =
5247 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5248 *(pendingRequest.input_buffer) = *(request->input_buffer);
5249 pInputBuffer = pendingRequest.input_buffer;
5250 } else {
5251 pendingRequest.input_buffer = NULL;
5252 pInputBuffer = NULL;
5253 }
5254
5255 pendingRequest.pipeline_depth = 0;
5256 pendingRequest.partial_result_cnt = 0;
5257 extractJpegMetadata(mCurJpegMeta, request);
5258 pendingRequest.jpegMetadata = mCurJpegMeta;
5259 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5260 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005262 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5263 mHybridAeEnable =
5264 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5265 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005266
5267 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5268 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005269 /* DevCamDebug metadata processCaptureRequest */
5270 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5271 mDevCamDebugMetaEnable =
5272 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5273 }
5274 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5275 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005276
5277 //extract CAC info
5278 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5279 mCacMode =
5280 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5281 }
5282 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005283 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005284
5285 PendingBuffersInRequest bufsForCurRequest;
5286 bufsForCurRequest.frame_number = frameNumber;
5287 // Mark current timestamp for the new request
5288 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005289 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005290
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005291 if (hdrPlusRequest) {
5292 // Save settings for this request.
5293 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5294 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5295
5296 // Add to pending HDR+ request queue.
5297 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5298 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5299
5300 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5301 }
5302
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005304 if (request->output_buffers[i].stream->data_space ==
5305 HAL_DATASPACE_DEPTH) {
5306 continue;
5307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 RequestedBufferInfo requestedBuf;
5309 memset(&requestedBuf, 0, sizeof(requestedBuf));
5310 requestedBuf.stream = request->output_buffers[i].stream;
5311 requestedBuf.buffer = NULL;
5312 pendingRequest.buffers.push_back(requestedBuf);
5313
5314 // Add the buffer handle to the pending buffers list
5315 PendingBufferInfo bufferInfo;
5316 bufferInfo.buffer = request->output_buffers[i].buffer;
5317 bufferInfo.stream = request->output_buffers[i].stream;
5318 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5319 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5320 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5321 frameNumber, bufferInfo.buffer,
5322 channel->getStreamTypeMask(), bufferInfo.stream->format);
5323 }
5324 // Add this request packet into mPendingBuffersMap
5325 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5326 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5327 mPendingBuffersMap.get_num_overall_buffers());
5328
5329 latestRequest = mPendingRequestsList.insert(
5330 mPendingRequestsList.end(), pendingRequest);
5331 if(mFlush) {
5332 LOGI("mFlush is true");
5333 pthread_mutex_unlock(&mMutex);
5334 return NO_ERROR;
5335 }
5336
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005337 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5338 // channel.
5339 if (!hdrPlusRequest) {
5340 int indexUsed;
5341 // Notify metadata channel we receive a request
5342 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005343
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005344 if(request->input_buffer != NULL){
5345 LOGD("Input request, frame_number %d", frameNumber);
5346 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5347 if (NO_ERROR != rc) {
5348 LOGE("fail to set reproc parameters");
5349 pthread_mutex_unlock(&mMutex);
5350 return rc;
5351 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 }
5353
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005354 // Call request on other streams
5355 uint32_t streams_need_metadata = 0;
5356 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5357 for (size_t i = 0; i < request->num_output_buffers; i++) {
5358 const camera3_stream_buffer_t& output = request->output_buffers[i];
5359 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5360
5361 if (channel == NULL) {
5362 LOGW("invalid channel pointer for stream");
5363 continue;
5364 }
5365
5366 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5367 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5368 output.buffer, request->input_buffer, frameNumber);
5369 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5372 if (rc < 0) {
5373 LOGE("Fail to request on picture channel");
5374 pthread_mutex_unlock(&mMutex);
5375 return rc;
5376 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005377 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005378 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5379 assert(NULL != mDepthChannel);
5380 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005381
Emilian Peev7650c122017-01-19 08:24:33 -08005382 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5383 if (rc < 0) {
5384 LOGE("Fail to map on depth buffer");
5385 pthread_mutex_unlock(&mMutex);
5386 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005387 }
Emilian Peev7650c122017-01-19 08:24:33 -08005388 } else {
5389 LOGD("snapshot request with buffer %p, frame_number %d",
5390 output.buffer, frameNumber);
5391 if (!request->settings) {
5392 rc = channel->request(output.buffer, frameNumber,
5393 NULL, mPrevParameters, indexUsed);
5394 } else {
5395 rc = channel->request(output.buffer, frameNumber,
5396 NULL, mParameters, indexUsed);
5397 }
5398 if (rc < 0) {
5399 LOGE("Fail to request on picture channel");
5400 pthread_mutex_unlock(&mMutex);
5401 return rc;
5402 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005403
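// Record the buffer index this stream will consume for the current frame so the
// backend request (CAM_INTF_META_STREAM_ID) carries per-stream buffer indices;
// in constrained high-speed mode the backend free-runs, so CAM_FREERUN_IDX is used.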
Emilian Peev7650c122017-01-19 08:24:33 -08005404 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5405 uint32_t j = 0;
5406 for (j = 0; j < streamsArray.num_streams; j++) {
5407 if (streamsArray.stream_request[j].streamID == streamId) {
5408 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5409 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5410 else
5411 streamsArray.stream_request[j].buf_index = indexUsed;
5412 break;
5413 }
5414 }
5415 if (j == streamsArray.num_streams) {
5416 LOGE("Did not find matching stream to update index");
5417 assert(0);
5418 }
5419
5420 pendingBufferIter->need_metadata = true;
5421 streams_need_metadata++;
5422 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005424 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5425 bool needMetadata = false;
5426 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5427 rc = yuvChannel->request(output.buffer, frameNumber,
5428 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5429 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005430 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005431 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005432 pthread_mutex_unlock(&mMutex);
5433 return rc;
5434 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005435
5436 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5437 uint32_t j = 0;
5438 for (j = 0; j < streamsArray.num_streams; j++) {
5439 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005440 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5441 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5442 else
5443 streamsArray.stream_request[j].buf_index = indexUsed;
5444 break;
5445 }
5446 }
5447 if (j == streamsArray.num_streams) {
5448 LOGE("Did not find matching stream to update index");
5449 assert(0);
5450 }
5451
5452 pendingBufferIter->need_metadata = needMetadata;
5453 if (needMetadata)
5454 streams_need_metadata += 1;
5455 LOGD("calling YUV channel request, need_metadata is %d",
5456 needMetadata);
5457 } else {
5458 LOGD("request with buffer %p, frame_number %d",
5459 output.buffer, frameNumber);
5460
5461 rc = channel->request(output.buffer, frameNumber, indexUsed);
5462
5463 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5464 uint32_t j = 0;
5465 for (j = 0; j < streamsArray.num_streams; j++) {
5466 if (streamsArray.stream_request[j].streamID == streamId) {
5467 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5468 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5469 else
5470 streamsArray.stream_request[j].buf_index = indexUsed;
5471 break;
5472 }
5473 }
5474 if (j == streamsArray.num_streams) {
5475 LOGE("Did not find matching stream to update index");
5476 assert(0);
5477 }
5478
5479 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5480 && mBatchSize) {
5481 mToBeQueuedVidBufs++;
5482 if (mToBeQueuedVidBufs == mBatchSize) {
5483 channel->queueBatchBuf();
5484 }
5485 }
5486 if (rc < 0) {
5487 LOGE("request failed");
5488 pthread_mutex_unlock(&mMutex);
5489 return rc;
5490 }
5491 }
5492 pendingBufferIter++;
5493 }
5494
5495 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5496 itr++) {
5497 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5498
5499 if (channel == NULL) {
5500 LOGE("invalid channel pointer for stream");
5501 assert(0);
5502 return BAD_VALUE;
5503 }
5504
5505 InternalRequest requestedStream;
5506 requestedStream = (*itr);
5507
5508
5509 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5510 LOGD("snapshot request internally input buffer %p, frame_number %d",
5511 request->input_buffer, frameNumber);
5512 if(request->input_buffer != NULL){
5513 rc = channel->request(NULL, frameNumber,
5514 pInputBuffer, &mReprocMeta, indexUsed, true,
5515 requestedStream.meteringOnly);
5516 if (rc < 0) {
5517 LOGE("Fail to request on picture channel");
5518 pthread_mutex_unlock(&mMutex);
5519 return rc;
5520 }
5521 } else {
5522 LOGD("snapshot request with frame_number %d", frameNumber);
5523 if (!request->settings) {
5524 rc = channel->request(NULL, frameNumber,
5525 NULL, mPrevParameters, indexUsed, true,
5526 requestedStream.meteringOnly);
5527 } else {
5528 rc = channel->request(NULL, frameNumber,
5529 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5530 }
5531 if (rc < 0) {
5532 LOGE("Fail to request on picture channel");
5533 pthread_mutex_unlock(&mMutex);
5534 return rc;
5535 }
5536
5537 if ((*itr).meteringOnly != 1) {
5538 requestedStream.need_metadata = 1;
5539 streams_need_metadata++;
5540 }
5541 }
5542
5543 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5544 uint32_t j = 0;
5545 for (j = 0; j < streamsArray.num_streams; j++) {
5546 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005547 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5548 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5549 else
5550 streamsArray.stream_request[j].buf_index = indexUsed;
5551 break;
5552 }
5553 }
5554 if (j == streamsArray.num_streams) {
5555 LOGE("Did not find matching stream to update index");
5556 assert(0);
5557 }
5558
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005559 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005561 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005562 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005563 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005564 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005565 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005566
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005567 // If two streams have need_metadata set to true, fail the request, unless
5568 // we copy or reference-count the metadata buffer
5569 if (streams_need_metadata > 1) {
5570 LOGE("not supporting request in which two streams requires"
5571 " 2 HAL metadata for reprocessing");
5572 pthread_mutex_unlock(&mMutex);
5573 return -EINVAL;
5574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005575
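// Enable PDAF data in the backend only when the current request includes a depth stream.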
Emilian Peev7650c122017-01-19 08:24:33 -08005576 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5578 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5579 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5580 pthread_mutex_unlock(&mMutex);
5581 return BAD_VALUE;
5582 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005583 if (request->input_buffer == NULL) {
5584 /* Set the parameters to backend:
5585 * - For every request in NORMAL MODE
5586 * - For every request in HFR mode during preview only case
5587 * - Once every batch in HFR mode during video recording
5588 */
5589 if (!mBatchSize ||
5590 (mBatchSize && !isVidBufRequested) ||
5591 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5592 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5593 mBatchSize, isVidBufRequested,
5594 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005595
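// End of a batch: fold the streams requested in this final frame into
// mBatchedStreamsArray so that a single set_parms call covers every stream
// seen across the whole batch.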
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005596 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5597 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5598 uint32_t m = 0;
5599 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5600 if (streamsArray.stream_request[k].streamID ==
5601 mBatchedStreamsArray.stream_request[m].streamID)
5602 break;
5603 }
5604 if (m == mBatchedStreamsArray.num_streams) {
5605 mBatchedStreamsArray.stream_request\
5606 [mBatchedStreamsArray.num_streams].streamID =
5607 streamsArray.stream_request[k].streamID;
5608 mBatchedStreamsArray.stream_request\
5609 [mBatchedStreamsArray.num_streams].buf_index =
5610 streamsArray.stream_request[k].buf_index;
5611 mBatchedStreamsArray.num_streams =
5612 mBatchedStreamsArray.num_streams + 1;
5613 }
5614 }
5615 streamsArray = mBatchedStreamsArray;
5616 }
5617 /* Update stream id of all the requested buffers */
5618 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5619 streamsArray)) {
5620 LOGE("Failed to set stream type mask in the parameters");
5621 return BAD_VALUE;
5622 }
5623
5624 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5625 mParameters);
5626 if (rc < 0) {
5627 LOGE("set_parms failed");
5628 }
5629 /* reset to zero because the batch is queued */
5630 mToBeQueuedVidBufs = 0;
5631 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5632 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5633 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005634 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5635 uint32_t m = 0;
5636 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5637 if (streamsArray.stream_request[k].streamID ==
5638 mBatchedStreamsArray.stream_request[m].streamID)
5639 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005640 }
5641 if (m == mBatchedStreamsArray.num_streams) {
5642 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5643 streamID = streamsArray.stream_request[k].streamID;
5644 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5645 buf_index = streamsArray.stream_request[k].buf_index;
5646 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5647 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005648 }
5649 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005650 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 }
5653
5654 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5655
5656 mState = STARTED;
5657 // Use a timed condition wait so that a stalled backend cannot block this call forever
5658 struct timespec ts;
5659 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005660 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005661 if (rc < 0) {
5662 isValidTimeout = 0;
5663 LOGE("Error reading the real time clock!!");
5664 }
5665 else {
5666 // Set a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005667 int64_t timeout = 5;
5668 {
5669 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5670 // If there is a pending HDR+ request, the following requests may be blocked until the
5671 // HDR+ request is done. So allow a longer timeout.
5672 if (mHdrPlusPendingRequests.size() > 0) {
5673 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5674 }
5675 }
5676 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005677 }
5678 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005679 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005680 (mState != ERROR) && (mState != DEINIT)) {
5681 if (!isValidTimeout) {
5682 LOGD("Blocking on conditional wait");
5683 pthread_cond_wait(&mRequestCond, &mMutex);
5684 }
5685 else {
5686 LOGD("Blocking on timed conditional wait");
5687 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5688 if (rc == ETIMEDOUT) {
5689 rc = -ENODEV;
5690 LOGE("Unblocked on timeout!!!!");
5691 break;
5692 }
5693 }
5694 LOGD("Unblocked");
5695 if (mWokenUpByDaemon) {
5696 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005697 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005698 break;
5699 }
5700 }
5701 pthread_mutex_unlock(&mMutex);
5702
5703 return rc;
5704}
5705
5706/*===========================================================================
5707 * FUNCTION : dump
5708 *
5709 * DESCRIPTION: Dump the pending request, pending buffer and pending frame
5710 *              drop state of the HAL to the given file descriptor.
5711 * PARAMETERS :
5712 *   @fd : file descriptor to write the dump into
5713 *
5714 * RETURN     : None
5715 *==========================================================================*/
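// Typically reached through camera3_device_ops_t::dump; in practice
// `adb shell dumpsys media.camera` is the usual trigger (see the mUpdateDebugLevel
// note at the end of this function).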
5716void QCamera3HardwareInterface::dump(int fd)
5717{
5718 pthread_mutex_lock(&mMutex);
5719 dprintf(fd, "\n Camera HAL3 information Begin \n");
5720
5721 dprintf(fd, "\nNumber of pending requests: %zu \n",
5722 mPendingRequestsList.size());
5723 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5724 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5725 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5726 for(pendingRequestIterator i = mPendingRequestsList.begin();
5727 i != mPendingRequestsList.end(); i++) {
5728 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5729 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5730 i->input_buffer);
5731 }
5732 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5733 mPendingBuffersMap.get_num_overall_buffers());
5734 dprintf(fd, "-------+------------------\n");
5735 dprintf(fd, " Frame | Stream type mask \n");
5736 dprintf(fd, "-------+------------------\n");
5737 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5738 for(auto &j : req.mPendingBufferList) {
5739 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5740 dprintf(fd, " %5d | %11d \n",
5741 req.frame_number, channel->getStreamTypeMask());
5742 }
5743 }
5744 dprintf(fd, "-------+------------------\n");
5745
5746 dprintf(fd, "\nPending frame drop list: %zu\n",
5747 mPendingFrameDropList.size());
5748 dprintf(fd, "-------+-----------\n");
5749 dprintf(fd, " Frame | Stream ID \n");
5750 dprintf(fd, "-------+-----------\n");
5751 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5752 i != mPendingFrameDropList.end(); i++) {
5753 dprintf(fd, " %5d | %9d \n",
5754 i->frame_number, i->stream_ID);
5755 }
5756 dprintf(fd, "-------+-----------\n");
5757
5758 dprintf(fd, "\n Camera HAL3 information End \n");
5759
5760 /* use dumpsys media.camera as trigger to send update debug level event */
5761 mUpdateDebugLevel = true;
5762 pthread_mutex_unlock(&mMutex);
5763 return;
5764}
5765
5766/*===========================================================================
5767 * FUNCTION : flush
5768 *
5769 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5770 * conditionally restarts channels
5771 *
5772 * PARAMETERS :
5773 * @ restartChannels: re-start all channels
5774 *
5775 *
5776 * RETURN :
5777 * 0 on success
5778 * Error code on failure
5779 *==========================================================================*/
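// Note: besides the framework-initiated flush, this is also invoked internally by
// handleCameraDeviceError() with restartChannels = false to tear down after a fatal error.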
5780int QCamera3HardwareInterface::flush(bool restartChannels)
5781{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005782 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005783 int32_t rc = NO_ERROR;
5784
5785 LOGD("Unblocking Process Capture Request");
5786 pthread_mutex_lock(&mMutex);
5787 mFlush = true;
5788 pthread_mutex_unlock(&mMutex);
5789
5790 rc = stopAllChannels();
5791 // Unlink the dual-camera bundle (turn related-sensor sync off) before teardown
5792 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005793 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5794 &m_pDualCamCmdPtr->bundle_info;
5795 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005796 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5797 pthread_mutex_lock(&gCamLock);
5798
5799 if (mIsMainCamera == 1) {
5800 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5801 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005802 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005803 // related session id should be session id of linked session
5804 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5805 } else {
5806 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5807 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005808 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005809 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5810 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005811 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005812 pthread_mutex_unlock(&gCamLock);
5813
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005814 rc = mCameraHandle->ops->set_dual_cam_cmd(
5815 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005816 if (rc < 0) {
5817 LOGE("Dualcam: Unlink failed, but still proceed to close");
5818 }
5819 }
5820
5821 if (rc < 0) {
5822 LOGE("stopAllChannels failed");
5823 return rc;
5824 }
5825 if (mChannelHandle) {
5826 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5827 mChannelHandle);
5828 }
5829
5830 // Reset bundle info
5831 rc = setBundleInfo();
5832 if (rc < 0) {
5833 LOGE("setBundleInfo failed %d", rc);
5834 return rc;
5835 }
5836
5837 // Mutex Lock
5838 pthread_mutex_lock(&mMutex);
5839
5840 // Unblock process_capture_request
5841 mPendingLiveRequest = 0;
5842 pthread_cond_signal(&mRequestCond);
5843
5844 rc = notifyErrorForPendingRequests();
5845 if (rc < 0) {
5846 LOGE("notifyErrorForPendingRequests failed");
5847 pthread_mutex_unlock(&mMutex);
5848 return rc;
5849 }
5850
5851 mFlush = false;
5852
5853 // Start the Streams/Channels
5854 if (restartChannels) {
5855 rc = startAllChannels();
5856 if (rc < 0) {
5857 LOGE("startAllChannels failed");
5858 pthread_mutex_unlock(&mMutex);
5859 return rc;
5860 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005861 if (mChannelHandle) {
5862 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5863 mChannelHandle);
5864 if (rc < 0) {
5865 LOGE("start_channel failed");
5866 pthread_mutex_unlock(&mMutex);
5867 return rc;
5868 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005869 }
5870 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005871 pthread_mutex_unlock(&mMutex);
5872
5873 return 0;
5874}
5875
5876/*===========================================================================
5877 * FUNCTION : flushPerf
5878 *
5879 * DESCRIPTION: This is the performance optimization version of flush that does
5880 * not use stream off; instead, it flushes the system
5881 *
5882 * PARAMETERS :
5883 *
5884 *
5885 * RETURN : 0 : success
5886 * -EINVAL: input is malformed (device is not valid)
5887 * -ENODEV: if the device has encountered a serious error
5888 *==========================================================================*/
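// Flow: flag mFlushPerf, issue the backend flush, wait (bounded by FLUSH_TIMEOUT) for all
// pending buffers to return, flush each channel, then error out the remaining pending requests.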
5889int QCamera3HardwareInterface::flushPerf()
5890{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005891 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005892 int32_t rc = 0;
5893 struct timespec timeout;
5894 bool timed_wait = false;
5895
5896 pthread_mutex_lock(&mMutex);
5897 mFlushPerf = true;
5898 mPendingBuffersMap.numPendingBufsAtFlush =
5899 mPendingBuffersMap.get_num_overall_buffers();
5900 LOGD("Calling flush. Wait for %d buffers to return",
5901 mPendingBuffersMap.numPendingBufsAtFlush);
5902
5903 /* send the flush event to the backend */
5904 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5905 if (rc < 0) {
5906 LOGE("Error in flush: IOCTL failure");
5907 mFlushPerf = false;
5908 pthread_mutex_unlock(&mMutex);
5909 return -ENODEV;
5910 }
5911
5912 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5913 LOGD("No pending buffers in HAL, return flush");
5914 mFlushPerf = false;
5915 pthread_mutex_unlock(&mMutex);
5916 return rc;
5917 }
5918
5919 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005920 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005921 if (rc < 0) {
5922 LOGE("Error reading the real time clock, cannot use timed wait");
5923 } else {
5924 timeout.tv_sec += FLUSH_TIMEOUT;
5925 timed_wait = true;
5926 }
5927
5928 //Block on conditional variable
5929 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5930 LOGD("Waiting on mBuffersCond");
5931 if (!timed_wait) {
5932 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5933 if (rc != 0) {
5934 LOGE("pthread_cond_wait failed due to rc = %s",
5935 strerror(rc));
5936 break;
5937 }
5938 } else {
5939 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5940 if (rc != 0) {
5941 LOGE("pthread_cond_timedwait failed due to rc = %s",
5942 strerror(rc));
5943 break;
5944 }
5945 }
5946 }
5947 if (rc != 0) {
5948 mFlushPerf = false;
5949 pthread_mutex_unlock(&mMutex);
5950 return -ENODEV;
5951 }
5952
5953 LOGD("Received buffers, now safe to return them");
5954
5955 //make sure the channels handle flush
5956 //currently only required for the picture channel to release snapshot resources
5957 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5958 it != mStreamInfo.end(); it++) {
5959 QCamera3Channel *channel = (*it)->channel;
5960 if (channel) {
5961 rc = channel->flush();
5962 if (rc) {
5963 LOGE("Flushing the channels failed with error %d", rc);
5964 // Even though the channel flush failed, we still need to continue and
5965 // return the buffers we have to the framework; however, the return
5966 // value will be an error
5967 rc = -ENODEV;
5968 }
5969 }
5970 }
5971
5972 /* notify the frameworks and send errored results */
5973 rc = notifyErrorForPendingRequests();
5974 if (rc < 0) {
5975 LOGE("notifyErrorForPendingRequests failed");
5976 pthread_mutex_unlock(&mMutex);
5977 return rc;
5978 }
5979
5980 //unblock process_capture_request
5981 mPendingLiveRequest = 0;
5982 unblockRequestIfNecessary();
5983
5984 mFlushPerf = false;
5985 pthread_mutex_unlock(&mMutex);
5986 LOGD ("Flush Operation complete. rc = %d", rc);
5987 return rc;
5988}
5989
5990/*===========================================================================
5991 * FUNCTION : handleCameraDeviceError
5992 *
5993 * DESCRIPTION: This function calls internal flush and notifies the error to
5994 * framework and updates the state variable.
5995 *
5996 * PARAMETERS : None
5997 *
5998 * RETURN : NO_ERROR on Success
5999 * Error code on failure
6000 *==========================================================================*/
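// The internal flush below is serialized by mFlushLock; once the device state reaches
// DEINIT, a CAMERA3_MSG_ERROR_DEVICE notification (frame_number 0, no stream) is sent.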
6001int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6002{
6003 int32_t rc = NO_ERROR;
6004
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006005 {
6006 Mutex::Autolock lock(mFlushLock);
6007 pthread_mutex_lock(&mMutex);
6008 if (mState != ERROR) {
6009 //if mState != ERROR, nothing to be done
6010 pthread_mutex_unlock(&mMutex);
6011 return NO_ERROR;
6012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006013 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006014
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006015 rc = flush(false /* restart channels */);
6016 if (NO_ERROR != rc) {
6017 LOGE("internal flush to handle mState = ERROR failed");
6018 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006019
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006020 pthread_mutex_lock(&mMutex);
6021 mState = DEINIT;
6022 pthread_mutex_unlock(&mMutex);
6023 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006024
6025 camera3_notify_msg_t notify_msg;
6026 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6027 notify_msg.type = CAMERA3_MSG_ERROR;
6028 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6029 notify_msg.message.error.error_stream = NULL;
6030 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006031 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006032
6033 return rc;
6034}
6035
6036/*===========================================================================
6037 * FUNCTION : captureResultCb
6038 *
6039 * DESCRIPTION: Callback handler for all capture result
6040 * (streams, as well as metadata)
6041 *
6042 * PARAMETERS :
6043 * @metadata : metadata information
6044 * @buffer : actual gralloc buffer to be returned to frameworks.
6045 * NULL if metadata.
6046 *
6047 * RETURN : NONE
6048 *==========================================================================*/
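// Dispatch summary: batched metadata is routed to handleBatchMetadata(); non-batch
// metadata, input-buffer completions and output buffers are each handled under mMutex.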
6049void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6050 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6051{
6052 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006053 pthread_mutex_lock(&mMutex);
6054 uint8_t batchSize = mBatchSize;
6055 pthread_mutex_unlock(&mMutex);
6056 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 handleBatchMetadata(metadata_buf,
6058 true /* free_and_bufdone_meta_buf */);
6059 } else { /* mBatchSize = 0 */
6060 hdrPlusPerfLock(metadata_buf);
6061 pthread_mutex_lock(&mMutex);
6062 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006063 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006064 true /* last urgent frame of batch metadata */,
6065 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006066 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006067 pthread_mutex_unlock(&mMutex);
6068 }
6069 } else if (isInputBuffer) {
6070 pthread_mutex_lock(&mMutex);
6071 handleInputBufferWithLock(frame_number);
6072 pthread_mutex_unlock(&mMutex);
6073 } else {
6074 pthread_mutex_lock(&mMutex);
6075 handleBufferWithLock(buffer, frame_number);
6076 pthread_mutex_unlock(&mMutex);
6077 }
6078 return;
6079}
6080
6081/*===========================================================================
6082 * FUNCTION : getReprocessibleOutputStreamId
6083 *
6084 * DESCRIPTION: Get source output stream id for the input reprocess stream
6085 * based on size and format, which would be the largest
6086 * output stream if an input stream exists.
6087 *
6088 * PARAMETERS :
6089 * @id : return the stream id if found
6090 *
6091 * RETURN : int32_t type of status
6092 * NO_ERROR -- success
6093 * non-zero failure code
6094 *==========================================================================*/
6095int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6096{
6097 /* Check whether any output or bidirectional stream has the same size and format
6098 as the input stream, and return that stream's id */
6099 if ((mInputStreamInfo.dim.width > 0) &&
6100 (mInputStreamInfo.dim.height > 0)) {
6101 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6102 it != mStreamInfo.end(); it++) {
6103
6104 camera3_stream_t *stream = (*it)->stream;
6105 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6106 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6107 (stream->format == mInputStreamInfo.format)) {
6108 // Usage flag for an input stream and the source output stream
6109 // may be different.
6110 LOGD("Found reprocessible output stream! %p", *it);
6111 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6112 stream->usage, mInputStreamInfo.usage);
6113
6114 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6115 if (channel != NULL && channel->mStreams[0]) {
6116 id = channel->mStreams[0]->getMyServerID();
6117 return NO_ERROR;
6118 }
6119 }
6120 }
6121 } else {
6122 LOGD("No input stream, so no reprocessible output stream");
6123 }
6124 return NAME_NOT_FOUND;
6125}
6126
6127/*===========================================================================
6128 * FUNCTION : lookupFwkName
6129 *
6130 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6131 * make sure the parameter is correctly propagated
6132 *
6133 * PARAMETERS :
6134 * @arr : map between the two enums
6135 * @len : len of the map
6136 * @hal_name : name of the hal_parm to map
6137 *
6138 * RETURN : int type of status
6139 * fwk_name -- success
6140 * non-zero failure code
6141 *==========================================================================*/
6142template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6143 size_t len, halType hal_name)
6144{
6145
6146 for (size_t i = 0; i < len; i++) {
6147 if (arr[i].hal_name == hal_name) {
6148 return arr[i].fwk_name;
6149 }
6150 }
6151
6152 /* Not being able to find a matching framework type is not necessarily
6153 * an error case. This happens when mm-camera supports more attributes
6154 * than the framework does */
6155 LOGH("Cannot find matching framework type");
6156 return NAME_NOT_FOUND;
6157}
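// Illustrative use, mirroring the scene-mode translation later in this file:
//   int val = lookupFwkName(SCENE_MODES_MAP, METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
//   if (NAME_NOT_FOUND != val) {
//       uint8_t fwkSceneMode = (uint8_t)val;
//       camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
//   }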
6158
6159/*===========================================================================
6160 * FUNCTION : lookupHalName
6161 *
6162 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6163 * make sure the parameter is correctly propagated
6164 *
6165 * PARAMETERS :
6166 * @arr : map between the two enums
6167 * @len : len of the map
6168 * @fwk_name : framework enum value to map to its HAL counterpart
6169 *
6170 * RETURN : int32_t type of status
6171 * hal_name -- success
6172 * non-zero failure code
6173 *==========================================================================*/
6174template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6175 size_t len, fwkType fwk_name)
6176{
6177 for (size_t i = 0; i < len; i++) {
6178 if (arr[i].fwk_name == fwk_name) {
6179 return arr[i].hal_name;
6180 }
6181 }
6182
6183 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6184 return NAME_NOT_FOUND;
6185}
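// Illustrative use (a sketch; the FLASH_MODES_MAP table is assumed here, as it is used
// elsewhere in this file for the reverse, HAL-to-framework lookup):
//   int32_t halFlashMode = lookupHalName(FLASH_MODES_MAP,
//           METADATA_MAP_SIZE(FLASH_MODES_MAP), fwkFlashMode);
//   if (NAME_NOT_FOUND == halFlashMode) { /* framework value not supported by backend */ }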
6186
6187/*===========================================================================
6188 * FUNCTION : lookupProp
6189 *
6190 * DESCRIPTION: lookup a value by its name
6191 *
6192 * PARAMETERS :
6193 * @arr : map between the two enums
6194 * @len : size of the map
6195 * @name : name to be looked up
6196 *
6197 * RETURN : Value if found
6198 * CAM_CDS_MODE_MAX if not found
6199 *==========================================================================*/
6200template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6201 size_t len, const char *name)
6202{
6203 if (name) {
6204 for (size_t i = 0; i < len; i++) {
6205 if (!strcmp(arr[i].desc, name)) {
6206 return arr[i].val;
6207 }
6208 }
6209 }
6210 return CAM_CDS_MODE_MAX;
6211}
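// Illustrative use, a sketch assuming a CDS_MAP descriptor table and a property string
// read via property_get() (the property name below is an assumption for illustration):
//   char prop[PROPERTY_VALUE_MAX];
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//   if (CAM_CDS_MODE_MAX == cds_mode) { /* fall back to a default CDS mode */ }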
6212
6213/*===========================================================================
6214 * FUNCTION   : translateFromHalMetadata
6215 *
6216 * DESCRIPTION: Translate the metadata reported by the HAL/backend into the framework camera_metadata_t format
6217 * PARAMETERS :
6218 * @metadata : metadata information from callback
6219 * @timestamp: metadata buffer timestamp
6220 * @request_id: request id
6221 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006222 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006223 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6224 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006225 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006226 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6227 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006228 *
6229 * RETURN : camera_metadata_t*
6230 * metadata in a format specified by fwk
6231 *==========================================================================*/
6232camera_metadata_t*
6233QCamera3HardwareInterface::translateFromHalMetadata(
6234 metadata_buffer_t *metadata,
6235 nsecs_t timestamp,
6236 int32_t request_id,
6237 const CameraMetadata& jpegMetadata,
6238 uint8_t pipeline_depth,
6239 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006240 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006241 /* DevCamDebug metadata translateFromHalMetadata argument */
6242 uint8_t DevCamDebug_meta_enable,
6243 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006244 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006245 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006246 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006247{
6248 CameraMetadata camMetadata;
6249 camera_metadata_t *resultMetadata;
6250
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006251 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006252 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6253 * Timestamp is needed because it's used for shutter notify calculation.
6254 */
6255 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6256 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006257 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006258 }
6259
Thierry Strudel3d639192016-09-09 11:52:26 -07006260 if (jpegMetadata.entryCount())
6261 camMetadata.append(jpegMetadata);
6262
6263 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6264 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6265 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6266 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006267 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006268 if (mBatchSize == 0) {
6269 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6270 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006272
Samuel Ha68ba5172016-12-15 18:41:12 -08006273 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6274 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6275 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6276 // DevCamDebug metadata translateFromHalMetadata AF
6277 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6278 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6279 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6280 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6281 }
6282 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6283 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6284 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6285 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6286 }
6287 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6288 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6289 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6290 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6291 }
6292 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6293 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6294 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6295 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6296 }
6297 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6298 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6299 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6300 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6301 }
6302 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6303 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6304 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6305 *DevCamDebug_af_monitor_pdaf_target_pos;
6306 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6307 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6308 }
6309 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6310 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6311 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6312 *DevCamDebug_af_monitor_pdaf_confidence;
6313 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6314 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6315 }
6316 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6317 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6318 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6319 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6320 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6321 }
6322 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6323 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6324 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6325 *DevCamDebug_af_monitor_tof_target_pos;
6326 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6327 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6328 }
6329 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6330 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6331 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6332 *DevCamDebug_af_monitor_tof_confidence;
6333 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6334 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6335 }
6336 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6337 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6338 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6339 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6340 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6341 }
6342 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6343 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6344 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6345 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6346 &fwk_DevCamDebug_af_monitor_type_select, 1);
6347 }
6348 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6349 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6350 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6351 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6352 &fwk_DevCamDebug_af_monitor_refocus, 1);
6353 }
6354 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6355 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6356 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6357 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6358 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6359 }
6360 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6361 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6362 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6363 *DevCamDebug_af_search_pdaf_target_pos;
6364 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6365 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6366 }
6367 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6368 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6369 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6370 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6371 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6372 }
6373 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6374 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6375 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6376 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6377 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6378 }
6379 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6380 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6381 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6382 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6383 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6384 }
6385 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6386 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6387 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6388 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6389 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6390 }
6391 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6392 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6393 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6394 *DevCamDebug_af_search_tof_target_pos;
6395 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6396 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6397 }
6398 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6399 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6400 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6401 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6402 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6403 }
6404 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6405 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6406 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6407 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6408 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6409 }
6410 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6411 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6412 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6413 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6414 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6415 }
6416 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6417 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6418 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6419 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6420 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6421 }
6422 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6423 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6424 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6425 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6426 &fwk_DevCamDebug_af_search_type_select, 1);
6427 }
6428 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6429 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6430 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6431 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6432 &fwk_DevCamDebug_af_search_next_pos, 1);
6433 }
6434 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6435 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6436 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6437 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6438 &fwk_DevCamDebug_af_search_target_pos, 1);
6439 }
6440 // DevCamDebug metadata translateFromHalMetadata AEC
6441 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6442 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6443 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6444 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6445 }
6446 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6447 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6448 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6449 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6450 }
6451 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6452 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6453 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6454 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6455 }
6456 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6457 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6458 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6459 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6460 }
6461 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6462 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6463 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6464 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6465 }
6466 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6467 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6468 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6469 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6470 }
6471 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6472 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6473 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6474 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6475 }
6476 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6477 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6478 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6479 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6480 }
Samuel Ha34229982017-02-17 13:51:11 -08006481 // DevCamDebug metadata translateFromHalMetadata zzHDR
6482 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6483 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6484 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6485 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6486 }
6487 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6488 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6489 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6490 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6491 }
6492 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6493 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6494 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6495 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6496 }
6497 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6498 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6499 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6500 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6501 }
6502 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6503 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6504 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6505 *DevCamDebug_aec_hdr_sensitivity_ratio;
6506 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6507 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6508 }
6509 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6510 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6511 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6512 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6513 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6514 }
6515 // DevCamDebug metadata translateFromHalMetadata ADRC
6516 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6517 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6518 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6519 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6520 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6521 }
6522 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6523 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6524 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6525 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6526 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6527 }
6528 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6529 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6530 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6531 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6532 }
6533 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6534 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6535 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6536 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6537 }
6538 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6539 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6540 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6541 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6542 }
6543 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6544 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6545 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6546 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6547 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006548 // DevCamDebug metadata translateFromHalMetadata AWB
6549 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6550 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6551 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6552 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6553 }
6554 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6555 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6556 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6557 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6558 }
6559 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6560 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6561 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6562 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6563 }
6564 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6565 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6566 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6567 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6568 }
6569 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6570 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6571 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6572 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6573 }
6574 }
6575 // atrace_end(ATRACE_TAG_ALWAYS);
6576
Thierry Strudel3d639192016-09-09 11:52:26 -07006577 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6578 int64_t fwk_frame_number = *frame_number;
6579 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6580 }
6581
6582 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6583 int32_t fps_range[2];
6584 fps_range[0] = (int32_t)float_range->min_fps;
6585 fps_range[1] = (int32_t)float_range->max_fps;
6586 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6587 fps_range, 2);
6588 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6589 fps_range[0], fps_range[1]);
6590 }
6591
6592 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6593 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6594 }
6595
6596 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6597 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6598 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6599 *sceneMode);
6600 if (NAME_NOT_FOUND != val) {
6601 uint8_t fwkSceneMode = (uint8_t)val;
6602 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6603 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6604 fwkSceneMode);
6605 }
6606 }
6607
6608 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6609 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6610 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6611 }
6612
6613 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6614 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6615 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6616 }
6617
6618 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6619 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6620 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6621 }
6622
6623 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6624 CAM_INTF_META_EDGE_MODE, metadata) {
6625 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6626 }
6627
6628 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6629 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6630 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6631 }
6632
6633 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6634 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6635 }
6636
6637 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6638 if (0 <= *flashState) {
6639 uint8_t fwk_flashState = (uint8_t) *flashState;
6640 if (!gCamCapability[mCameraId]->flash_available) {
6641 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6642 }
6643 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6644 }
6645 }
6646
6647 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6648 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6649 if (NAME_NOT_FOUND != val) {
6650 uint8_t fwk_flashMode = (uint8_t)val;
6651 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6652 }
6653 }
6654
6655 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6656 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6657 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6658 }
6659
6660 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6661 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6662 }
6663
6664 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6665 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6666 }
6667
6668 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6669 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6670 }
6671
6672 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6673 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6674 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6675 }
6676
6677 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6678 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6679 LOGD("fwk_videoStab = %d", fwk_videoStab);
6680 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6681 } else {
6682 // Regardless of whether video stabilization is supported or not, CTS expects the EIS result
6683 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6684 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6685 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006686 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006687 }
6688
6689 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6690 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6691 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6692 }
6693
6694 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6695 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6696 }
6697
Thierry Strudel3d639192016-09-09 11:52:26 -07006698 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6699 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006700 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006701
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006702 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6703 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006704
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006705        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006706 blackLevelAppliedPattern->cam_black_level[0],
6707 blackLevelAppliedPattern->cam_black_level[1],
6708 blackLevelAppliedPattern->cam_black_level[2],
6709 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006710 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6711 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006712
6713#ifndef USE_HAL_3_3
6714 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006715        // Need to convert from the internal 12-bit depth to the sensor's 10-bit
6716        // raw depth space.
6717 fwk_blackLevelInd[0] /= 4.0;
6718 fwk_blackLevelInd[1] /= 4.0;
6719 fwk_blackLevelInd[2] /= 4.0;
6720 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006721 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6722 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006723#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006724 }
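    // For illustration, with the divide-by-4 rescaling above, an applied 12-bit
    // black level of 256 maps to 256 / 4 = 64 on the 10-bit raw scale, since
    // dropping two bits of depth scales code values by 1/4.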
6725
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006726#ifndef USE_HAL_3_3
6727 // Fixed whitelevel is used by ISP/Sensor
6728 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6729 &gCamCapability[mCameraId]->white_level, 1);
6730#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006731
6732 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6733 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6734 int32_t scalerCropRegion[4];
6735 scalerCropRegion[0] = hScalerCropRegion->left;
6736 scalerCropRegion[1] = hScalerCropRegion->top;
6737 scalerCropRegion[2] = hScalerCropRegion->width;
6738 scalerCropRegion[3] = hScalerCropRegion->height;
6739
6740 // Adjust crop region from sensor output coordinate system to active
6741 // array coordinate system.
6742 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6743 scalerCropRegion[2], scalerCropRegion[3]);
6744
6745 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6746 }
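    // Hypothetical example of the remapping above: if the sensor output is a
    // 2x-binned version of the active pixel array, a crop of (100, 100, 1000, 750)
    // in sensor-output coordinates becomes (200, 200, 2000, 1500) in active-array
    // coordinates; the actual scale factors come from mCropRegionMapper.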
6747
6748 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6749 LOGD("sensorExpTime = %lld", *sensorExpTime);
6750 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6751 }
6752
6753    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6754            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6755        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6756        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6757 }
6758
6759 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6760 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6761 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6762 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6763 sensorRollingShutterSkew, 1);
6764 }
6765
6766 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6767 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6768 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6769
6770 //calculate the noise profile based on sensitivity
6771 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6772 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6773 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6774 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6775 noise_profile[i] = noise_profile_S;
6776 noise_profile[i+1] = noise_profile_O;
6777 }
6778 LOGD("noise model entry (S, O) is (%f, %f)",
6779 noise_profile_S, noise_profile_O);
6780 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6781 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6782 }
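    // Note on the layout above: the framework expects one (S, O) pair per color
    // channel, describing an approximately linear noise model
    // variance(x) ~= S * x + O for pixel value x, which is why noise_profile
    // stores S at even indices and O at odd indices.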
6783
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006784#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006785 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006786 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006787 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006788 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006789 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6790 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6791 }
6792 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006793#endif
6794
Thierry Strudel3d639192016-09-09 11:52:26 -07006795 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6796 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6797 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6798 }
6799
6800 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6801 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6802 *faceDetectMode);
6803 if (NAME_NOT_FOUND != val) {
6804 uint8_t fwk_faceDetectMode = (uint8_t)val;
6805 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6806
6807 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6808 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6809 CAM_INTF_META_FACE_DETECTION, metadata) {
6810 uint8_t numFaces = MIN(
6811 faceDetectionInfo->num_faces_detected, MAX_ROI);
6812 int32_t faceIds[MAX_ROI];
6813 uint8_t faceScores[MAX_ROI];
6814 int32_t faceRectangles[MAX_ROI * 4];
6815 int32_t faceLandmarks[MAX_ROI * 6];
6816 size_t j = 0, k = 0;
6817
6818 for (size_t i = 0; i < numFaces; i++) {
6819 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6820                        // Adjust face boundary from sensor output coordinate system
6821                        // to active array coordinate system.
6822 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6823 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6824 rect.width, rect.height);
6825
6826 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6827 faceRectangles+j, -1);
6828
6829 j+= 4;
6830 }
6831 if (numFaces <= 0) {
6832 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6833 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6834 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6835 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6836 }
6837
6838 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6839 numFaces);
6840 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6841 faceRectangles, numFaces * 4U);
6842 if (fwk_faceDetectMode ==
6843 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6844 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6845 CAM_INTF_META_FACE_LANDMARK, metadata) {
6846
6847 for (size_t i = 0; i < numFaces; i++) {
6848                                // Map the landmark coordinates from the sensor output
6849                                // coordinate system to the active array coordinate system.
6850 mCropRegionMapper.toActiveArray(
6851 landmarks->face_landmarks[i].left_eye_center.x,
6852 landmarks->face_landmarks[i].left_eye_center.y);
6853 mCropRegionMapper.toActiveArray(
6854 landmarks->face_landmarks[i].right_eye_center.x,
6855 landmarks->face_landmarks[i].right_eye_center.y);
6856 mCropRegionMapper.toActiveArray(
6857 landmarks->face_landmarks[i].mouth_center.x,
6858 landmarks->face_landmarks[i].mouth_center.y);
6859
6860 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006861 k+= TOTAL_LANDMARK_INDICES;
6862 }
6863 } else {
6864 for (size_t i = 0; i < numFaces; i++) {
6865 setInvalidLandmarks(faceLandmarks+k);
6866 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006867 }
6868 }
6869
6870 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6871 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6872 faceLandmarks, numFaces * 6U);
6873 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006874 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6875 CAM_INTF_META_FACE_BLINK, metadata) {
6876 uint8_t detected[MAX_ROI];
6877 uint8_t degree[MAX_ROI * 2];
6878 for (size_t i = 0; i < numFaces; i++) {
6879 detected[i] = blinks->blink[i].blink_detected;
6880 degree[2 * i] = blinks->blink[i].left_blink;
6881 degree[2 * i + 1] = blinks->blink[i].right_blink;
6882 }
6883 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6884 detected, numFaces);
6885 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6886 degree, numFaces * 2);
6887 }
6888 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6889 CAM_INTF_META_FACE_SMILE, metadata) {
6890 uint8_t degree[MAX_ROI];
6891 uint8_t confidence[MAX_ROI];
6892 for (size_t i = 0; i < numFaces; i++) {
6893 degree[i] = smiles->smile[i].smile_degree;
6894 confidence[i] = smiles->smile[i].smile_confidence;
6895 }
6896 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6897 degree, numFaces);
6898 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6899 confidence, numFaces);
6900 }
6901 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6902 CAM_INTF_META_FACE_GAZE, metadata) {
6903 int8_t angle[MAX_ROI];
6904 int32_t direction[MAX_ROI * 3];
6905 int8_t degree[MAX_ROI * 2];
6906 for (size_t i = 0; i < numFaces; i++) {
6907 angle[i] = gazes->gaze[i].gaze_angle;
6908 direction[3 * i] = gazes->gaze[i].updown_dir;
6909 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6910 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6911 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6912 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6913 }
6914 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6915 (uint8_t *)angle, numFaces);
6916 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6917 direction, numFaces * 3);
6918 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6919 (uint8_t *)degree, numFaces * 2);
6920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006921 }
6922 }
6923 }
6924 }
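    // Note on the face metadata packing above: each detected face contributes one
    // entry to faceScores, four ints to faceRectangles (via convertToRegions) and,
    // in FULL mode, TOTAL_LANDMARK_INDICES (6) ints to faceLandmarks covering the
    // left eye, right eye and mouth centers; the arrays are zeroed when no faces
    // are detected.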
6925
6926 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6927 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08006928 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006929 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08006930 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006931
Shuzhen Wang14415f52016-11-16 18:26:18 -08006932 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
6933 histogramBins = *histBins;
6934 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
6935 }
6936
6937 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006938 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6939 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08006940 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006941
6942 switch (stats_data->type) {
6943 case CAM_HISTOGRAM_TYPE_BAYER:
6944 switch (stats_data->bayer_stats.data_type) {
6945 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006946 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
6947 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006948 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006949 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
6950 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006951 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006952 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
6953 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006954 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006955 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006956 case CAM_STATS_CHANNEL_R:
6957 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006958 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
6959 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006960 }
6961 break;
6962 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006963 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006964 break;
6965 }
6966
Shuzhen Wang14415f52016-11-16 18:26:18 -08006967 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006968 }
6969 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006970 }
6971
6972 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6973 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6974 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6975 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6976 }
6977
6978 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6979 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6980 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6981 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6982 }
6983
6984 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6985 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6986 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6987 CAM_MAX_SHADING_MAP_HEIGHT);
6988 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6989 CAM_MAX_SHADING_MAP_WIDTH);
6990 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6991 lensShadingMap->lens_shading, 4U * map_width * map_height);
6992 }
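    // The shading map carries four gain factors per grid cell (one per Bayer
    // channel), hence the 4U * map_width * map_height entry count above.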
6993
6994 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6995 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6996 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6997 }
6998
6999 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7000 //Populate CAM_INTF_META_TONEMAP_CURVES
7001 /* ch0 = G, ch 1 = B, ch 2 = R*/
7002 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7003 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7004 tonemap->tonemap_points_cnt,
7005 CAM_MAX_TONEMAP_CURVE_SIZE);
7006 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7007 }
7008
7009 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7010 &tonemap->curves[0].tonemap_points[0][0],
7011 tonemap->tonemap_points_cnt * 2);
7012
7013 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7014 &tonemap->curves[1].tonemap_points[0][0],
7015 tonemap->tonemap_points_cnt * 2);
7016
7017 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7018 &tonemap->curves[2].tonemap_points[0][0],
7019 tonemap->tonemap_points_cnt * 2);
7020 }
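    // Each curve above is a flattened list of (Pin, Pout) control points, so
    // tonemap_points_cnt points occupy tonemap_points_cnt * 2 floats; for example,
    // a two-point identity curve would be stored as {0.0, 0.0, 1.0, 1.0}.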
7021
7022 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7023 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7024 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7025 CC_GAIN_MAX);
7026 }
7027
7028 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7029 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7030 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7031 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7032 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7033 }
7034
7035 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7036 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7037 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7038 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7039 toneCurve->tonemap_points_cnt,
7040 CAM_MAX_TONEMAP_CURVE_SIZE);
7041 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7042 }
7043 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7044 (float*)toneCurve->curve.tonemap_points,
7045 toneCurve->tonemap_points_cnt * 2);
7046 }
7047
7048 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7049 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7050 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7051 predColorCorrectionGains->gains, 4);
7052 }
7053
7054 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7055 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7056 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7057 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7058 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7059 }
7060
7061 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7062 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7066 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7067 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7068 }
7069
7070 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7071 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7072 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7073 }
7074
7075 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7076 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7077 *effectMode);
7078 if (NAME_NOT_FOUND != val) {
7079 uint8_t fwk_effectMode = (uint8_t)val;
7080 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7081 }
7082 }
7083
7084 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7085 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7086 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7087 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7088 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7089 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7090 }
7091 int32_t fwk_testPatternData[4];
7092 fwk_testPatternData[0] = testPatternData->r;
7093 fwk_testPatternData[3] = testPatternData->b;
7094 switch (gCamCapability[mCameraId]->color_arrangement) {
7095 case CAM_FILTER_ARRANGEMENT_RGGB:
7096 case CAM_FILTER_ARRANGEMENT_GRBG:
7097 fwk_testPatternData[1] = testPatternData->gr;
7098 fwk_testPatternData[2] = testPatternData->gb;
7099 break;
7100 case CAM_FILTER_ARRANGEMENT_GBRG:
7101 case CAM_FILTER_ARRANGEMENT_BGGR:
7102 fwk_testPatternData[2] = testPatternData->gr;
7103 fwk_testPatternData[1] = testPatternData->gb;
7104 break;
7105 default:
7106 LOGE("color arrangement %d is not supported",
7107 gCamCapability[mCameraId]->color_arrangement);
7108 break;
7109 }
7110 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7111 }
7112
7113 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7114 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7115 }
7116
7117 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7118 String8 str((const char *)gps_methods);
7119 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7120 }
7121
7122 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7123 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7127 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7128 }
7129
7130 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7131 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7132 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7133 }
7134
7135 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7136 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7137 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7138 }
7139
7140 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7141 int32_t fwk_thumb_size[2];
7142 fwk_thumb_size[0] = thumb_size->width;
7143 fwk_thumb_size[1] = thumb_size->height;
7144 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7145 }
7146
7147 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7148 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7149 privateData,
7150 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7151 }
7152
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007153 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007154 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007155 meteringMode, 1);
7156 }
7157
Thierry Strudel54dc9782017-02-15 12:12:10 -08007158 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7159 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7160 LOGD("hdr_scene_data: %d %f\n",
7161 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7162 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7163 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7164 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7165 &isHdr, 1);
7166 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7167 &isHdrConfidence, 1);
7168 }
7169
7170
7171
Thierry Strudel3d639192016-09-09 11:52:26 -07007172 if (metadata->is_tuning_params_valid) {
7173 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7174 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7175 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7176
7177
7178 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7179 sizeof(uint32_t));
7180 data += sizeof(uint32_t);
7181
7182 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7183 sizeof(uint32_t));
7184 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7185 data += sizeof(uint32_t);
7186
7187 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7188 sizeof(uint32_t));
7189 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7190 data += sizeof(uint32_t);
7191
7192 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7193 sizeof(uint32_t));
7194 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7195 data += sizeof(uint32_t);
7196
7197 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7198 sizeof(uint32_t));
7199 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7200 data += sizeof(uint32_t);
7201
7202 metadata->tuning_params.tuning_mod3_data_size = 0;
7203 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7204 sizeof(uint32_t));
7205 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7206 data += sizeof(uint32_t);
7207
7208 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7209 TUNING_SENSOR_DATA_MAX);
7210 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7211 count);
7212 data += count;
7213
7214 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7215 TUNING_VFE_DATA_MAX);
7216 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7217 count);
7218 data += count;
7219
7220 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7221 TUNING_CPP_DATA_MAX);
7222 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7223 count);
7224 data += count;
7225
7226 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7227 TUNING_CAC_DATA_MAX);
7228 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7229 count);
7230 data += count;
7231
7232 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7233 (int32_t *)(void *)tuning_meta_data_blob,
7234 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7235 }
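    // Illustrative sketch (not compiled into the HAL): a consumer of
    // QCAMERA3_TUNING_META_DATA_BLOB could interpret the layout packed above as
    //
    //   struct TuningBlobHeader {          // hypothetical helper type
    //       uint32_t version;              // TUNING_DATA_VERSION
    //       uint32_t sensor_data_size;
    //       uint32_t vfe_data_size;
    //       uint32_t cpp_data_size;
    //       uint32_t cac_data_size;
    //       uint32_t mod3_data_size;       // always written as 0 here
    //   };
    //
    // followed by the (size-clamped) sensor, VFE, CPP and CAC payloads copied in
    // that order from metadata->tuning_params.data.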
7236
7237 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7238 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7239 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7240 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7241 NEUTRAL_COL_POINTS);
7242 }
7243
7244 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7245 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7246 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7247 }
7248
7249 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7250 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7251        // Adjust AE region from sensor output coordinate system to active
7252        // array coordinate system.
7253 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7254 hAeRegions->rect.width, hAeRegions->rect.height);
7255
7256 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7257 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7258 REGIONS_TUPLE_COUNT);
7259 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7260 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7261 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7262 hAeRegions->rect.height);
7263 }
7264
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007265 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7266 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7267 if (NAME_NOT_FOUND != val) {
7268 uint8_t fwkAfMode = (uint8_t)val;
7269 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7270 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7271 } else {
7272 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7273 val);
7274 }
7275 }
7276
Thierry Strudel3d639192016-09-09 11:52:26 -07007277 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7278 uint8_t fwk_afState = (uint8_t) *afState;
7279 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007280 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007281 }
7282
7283 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7284 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7285 }
7286
7287 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7288 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7289 }
7290
7291 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7292 uint8_t fwk_lensState = *lensState;
7293 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7294 }
7295
7296 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7297 /*af regions*/
7298 int32_t afRegions[REGIONS_TUPLE_COUNT];
7299        // Adjust AF region from sensor output coordinate system to active
7300        // array coordinate system.
7301 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7302 hAfRegions->rect.width, hAfRegions->rect.height);
7303
7304 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7305 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7306 REGIONS_TUPLE_COUNT);
7307 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7308 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7309 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7310 hAfRegions->rect.height);
7311 }
7312
7313 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007314 uint32_t ab_mode = *hal_ab_mode;
7315 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7316 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7317 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007319 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007320 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007321 if (NAME_NOT_FOUND != val) {
7322 uint8_t fwk_ab_mode = (uint8_t)val;
7323 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7324 }
7325 }
7326
7327 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7328 int val = lookupFwkName(SCENE_MODES_MAP,
7329 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7330 if (NAME_NOT_FOUND != val) {
7331 uint8_t fwkBestshotMode = (uint8_t)val;
7332 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7333 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7334 } else {
7335 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7336 }
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7340 uint8_t fwk_mode = (uint8_t) *mode;
7341 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7342 }
7343
7344    /* Constant metadata values to be updated */
7345 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7346 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7347
7348 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7349 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7350
7351 int32_t hotPixelMap[2];
7352 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7353
7354 // CDS
7355 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7356 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7357 }
7358
Thierry Strudel04e026f2016-10-10 11:27:36 -07007359 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7360 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007361 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007362 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7363 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7364 } else {
7365 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7366 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007367
7368 if(fwk_hdr != curr_hdr_state) {
7369 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7370 if(fwk_hdr)
7371 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7372 else
7373 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7374 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007375 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7376 }
7377
Thierry Strudel54dc9782017-02-15 12:12:10 -08007378 //binning correction
7379 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7380 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7381 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7382 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7383 }
7384
Thierry Strudel04e026f2016-10-10 11:27:36 -07007385 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007386 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007387 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7388 int8_t is_ir_on = 0;
7389
7390        (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0);
7391 if(is_ir_on != curr_ir_state) {
7392 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7393 if(is_ir_on)
7394 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7395 else
7396 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7397 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007398 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007399 }
7400
Thierry Strudel269c81a2016-10-12 12:13:59 -07007401 // AEC SPEED
7402 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7403 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7404 }
7405
7406 // AWB SPEED
7407 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7408 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7409 }
7410
Thierry Strudel3d639192016-09-09 11:52:26 -07007411 // TNR
7412 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7413 uint8_t tnr_enable = tnr->denoise_enable;
7414 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007415 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7416 int8_t is_tnr_on = 0;
7417
7418 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7419 if(is_tnr_on != curr_tnr_state) {
7420 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7421 if(is_tnr_on)
7422 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7423 else
7424 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007426
7427 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7428 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7429 }
7430
7431 // Reprocess crop data
7432 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7433 uint8_t cnt = crop_data->num_of_streams;
7434 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7435 // mm-qcamera-daemon only posts crop_data for streams
7436            // not linked to pproc, so the absence of valid crop metadata is
7437            // not necessarily an error.
7438 LOGD("No valid crop metadata entries");
7439 } else {
7440 uint32_t reproc_stream_id;
7441 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7442 LOGD("No reprocessible stream found, ignore crop data");
7443 } else {
7444 int rc = NO_ERROR;
7445 Vector<int32_t> roi_map;
7446 int32_t *crop = new int32_t[cnt*4];
7447 if (NULL == crop) {
7448 rc = NO_MEMORY;
7449 }
7450 if (NO_ERROR == rc) {
7451 int32_t streams_found = 0;
7452 for (size_t i = 0; i < cnt; i++) {
7453 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7454 if (pprocDone) {
7455 // HAL already does internal reprocessing,
7456 // either via reprocessing before JPEG encoding,
7457 // or offline postprocessing for pproc bypass case.
7458 crop[0] = 0;
7459 crop[1] = 0;
7460 crop[2] = mInputStreamInfo.dim.width;
7461 crop[3] = mInputStreamInfo.dim.height;
7462 } else {
7463 crop[0] = crop_data->crop_info[i].crop.left;
7464 crop[1] = crop_data->crop_info[i].crop.top;
7465 crop[2] = crop_data->crop_info[i].crop.width;
7466 crop[3] = crop_data->crop_info[i].crop.height;
7467 }
7468 roi_map.add(crop_data->crop_info[i].roi_map.left);
7469 roi_map.add(crop_data->crop_info[i].roi_map.top);
7470 roi_map.add(crop_data->crop_info[i].roi_map.width);
7471 roi_map.add(crop_data->crop_info[i].roi_map.height);
7472 streams_found++;
7473 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7474 crop[0], crop[1], crop[2], crop[3]);
7475 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7476 crop_data->crop_info[i].roi_map.left,
7477 crop_data->crop_info[i].roi_map.top,
7478 crop_data->crop_info[i].roi_map.width,
7479 crop_data->crop_info[i].roi_map.height);
7480 break;
7481
7482 }
7483 }
7484 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7485 &streams_found, 1);
7486 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7487 crop, (size_t)(streams_found * 4));
7488 if (roi_map.array()) {
7489 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7490 roi_map.array(), roi_map.size());
7491 }
7492 }
7493 if (crop) {
7494 delete [] crop;
7495 }
7496 }
7497 }
7498 }
7499
7500 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7501        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7502        // non-NULL, so hardcode the CAC result to OFF mode.
7503 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7504 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7505 } else {
7506 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7507 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7508 *cacMode);
7509 if (NAME_NOT_FOUND != val) {
7510 uint8_t resultCacMode = (uint8_t)val;
7511                // Check whether the CAC result from the callback matches the framework-set
7512                // CAC mode; if not, report the CAC mode that came in the corresponding request.
7513 if (fwk_cacMode != resultCacMode) {
7514 resultCacMode = fwk_cacMode;
7515 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007516 //Check if CAC is disabled by property
7517 if (m_cacModeDisabled) {
7518 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7519 }
7520
Thierry Strudel3d639192016-09-09 11:52:26 -07007521 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7522 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7523 } else {
7524 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7525 }
7526 }
7527 }
7528
7529 // Post blob of cam_cds_data through vendor tag.
7530 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7531 uint8_t cnt = cdsInfo->num_of_streams;
7532 cam_cds_data_t cdsDataOverride;
7533 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7534 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7535 cdsDataOverride.num_of_streams = 1;
7536 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7537 uint32_t reproc_stream_id;
7538 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7539 LOGD("No reprocessible stream found, ignore cds data");
7540 } else {
7541 for (size_t i = 0; i < cnt; i++) {
7542 if (cdsInfo->cds_info[i].stream_id ==
7543 reproc_stream_id) {
7544 cdsDataOverride.cds_info[0].cds_enable =
7545 cdsInfo->cds_info[i].cds_enable;
7546 break;
7547 }
7548 }
7549 }
7550 } else {
7551 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7552 }
7553 camMetadata.update(QCAMERA3_CDS_INFO,
7554 (uint8_t *)&cdsDataOverride,
7555 sizeof(cam_cds_data_t));
7556 }
7557
7558 // Ldaf calibration data
7559 if (!mLdafCalibExist) {
7560 IF_META_AVAILABLE(uint32_t, ldafCalib,
7561 CAM_INTF_META_LDAF_EXIF, metadata) {
7562 mLdafCalibExist = true;
7563 mLdafCalib[0] = ldafCalib[0];
7564 mLdafCalib[1] = ldafCalib[1];
7565 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7566 ldafCalib[0], ldafCalib[1]);
7567 }
7568 }
7569
Thierry Strudel54dc9782017-02-15 12:12:10 -08007570 // EXIF debug data through vendor tag
7571 /*
7572 * Mobicat Mask can assume 3 values:
7573 * 1 refers to Mobicat data,
7574 * 2 refers to Stats Debug and Exif Debug Data
7575 * 3 refers to Mobicat and Stats Debug Data
7576 * We want to make sure that we are sending Exif debug data
7577 * only when Mobicat Mask is 2.
7578 */
7579 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7580 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7581 (uint8_t *)(void *)mExifParams.debug_params,
7582 sizeof(mm_jpeg_debug_exif_params_t));
7583 }
7584
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007585 // Reprocess and DDM debug data through vendor tag
7586 cam_reprocess_info_t repro_info;
7587 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7589 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007590 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007591 }
7592 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7593 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007594 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007595 }
7596 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7597 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007598 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007599 }
7600 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7601 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007602 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007603 }
7604 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7605 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007606 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007607 }
7608 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007609 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007610 }
7611 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7612 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007613 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007614 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007615 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7616 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7617 }
7618 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7619 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7620 }
7621 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7622 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007623
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007624 // INSTANT AEC MODE
7625 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7626 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7627 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7628 }
7629
Shuzhen Wange763e802016-03-31 10:24:29 -07007630 // AF scene change
7631 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7632 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7633 }
7634
Thierry Strudel3d639192016-09-09 11:52:26 -07007635 resultMetadata = camMetadata.release();
7636 return resultMetadata;
7637}
7638
7639/*===========================================================================
7640 * FUNCTION : saveExifParams
7641 *
7642 * DESCRIPTION: Cache 3A/stats EXIF debug parameters delivered in the metadata callback.
7643 *
7644 * PARAMETERS :
7645 * @metadata : metadata information from callback
7646 *
7647 * RETURN : none
7648 *
7649 *==========================================================================*/
7650void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7651{
7652 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7653 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7654 if (mExifParams.debug_params) {
7655 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7656 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7657 }
7658 }
7659 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7660 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7661 if (mExifParams.debug_params) {
7662 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7663 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7664 }
7665 }
7666 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7667 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7668 if (mExifParams.debug_params) {
7669 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7670 mExifParams.debug_params->af_debug_params_valid = TRUE;
7671 }
7672 }
7673 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7674 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7675 if (mExifParams.debug_params) {
7676 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7677 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7678 }
7679 }
7680 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7681 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7682 if (mExifParams.debug_params) {
7683 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7684 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7685 }
7686 }
7687 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7688 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7689 if (mExifParams.debug_params) {
7690 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7691 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7692 }
7693 }
7694 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7695 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7696 if (mExifParams.debug_params) {
7697 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7698 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7699 }
7700 }
7701 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7702 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7703 if (mExifParams.debug_params) {
7704 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7705 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7706 }
7707 }
7708}
7709
7710/*===========================================================================
7711 * FUNCTION : get3AExifParams
7712 *
7713 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data.
7714 *
7715 * PARAMETERS : none
7716 *
7717 *
7718 * RETURN : mm_jpeg_exif_params_t
7719 *
7720 *==========================================================================*/
7721mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7722{
7723 return mExifParams;
7724}
7725
7726/*===========================================================================
7727 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7728 *
7729 * DESCRIPTION: Translate urgent (partial result) metadata from the camera backend
7730 *              into framework result metadata.
7730 *
7731 * PARAMETERS :
7732 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007733 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7734 * urgent metadata in a batch. Always true for
7735 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007736 *
7737 * RETURN : camera_metadata_t*
7738 * metadata in a format specified by fwk
7739 *==========================================================================*/
7740camera_metadata_t*
7741QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007742 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007743{
7744 CameraMetadata camMetadata;
7745 camera_metadata_t *resultMetadata;
7746
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007747 if (!lastUrgentMetadataInBatch) {
7748 /* In batch mode, use empty metadata if this is not the last in batch
7749 */
7750 resultMetadata = allocate_camera_metadata(0, 0);
7751 return resultMetadata;
7752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007753
7754 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7755 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7756 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7757 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7758 }
7759
7760 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7761 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7762 &aecTrigger->trigger, 1);
7763 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7764 &aecTrigger->trigger_id, 1);
7765 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7766 aecTrigger->trigger);
7767 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7768 aecTrigger->trigger_id);
7769 }
7770
7771 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7772 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7773 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7774 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7775 }
7776
Thierry Strudel3d639192016-09-09 11:52:26 -07007777 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7778 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7779 &af_trigger->trigger, 1);
7780 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7781 af_trigger->trigger);
7782 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7783 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7784 af_trigger->trigger_id);
7785 }
7786
7787 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7788 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7789 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7790 if (NAME_NOT_FOUND != val) {
7791 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7792 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7793 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7794 } else {
7795 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7796 }
7797 }
7798
7799 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7800 uint32_t aeMode = CAM_AE_MODE_MAX;
7801 int32_t flashMode = CAM_FLASH_MODE_MAX;
7802 int32_t redeye = -1;
7803 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7804 aeMode = *pAeMode;
7805 }
7806 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7807 flashMode = *pFlashMode;
7808 }
7809 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7810 redeye = *pRedeye;
7811 }
7812
7813 if (1 == redeye) {
7814 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7815 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7816 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7817 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7818 flashMode);
7819 if (NAME_NOT_FOUND != val) {
7820 fwk_aeMode = (uint8_t)val;
7821 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7822 } else {
7823 LOGE("Unsupported flash mode %d", flashMode);
7824 }
7825 } else if (aeMode == CAM_AE_MODE_ON) {
7826 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7827 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7828 } else if (aeMode == CAM_AE_MODE_OFF) {
7829 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7830 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7831 } else {
7832 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7833 "flashMode:%d, aeMode:%u!!!",
7834 redeye, flashMode, aeMode);
7835 }
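    // In summary, the reported ANDROID_CONTROL_AE_MODE is derived as follows:
    //   redeye == 1                  -> ON_AUTO_FLASH_REDEYE
    //   flashMode AUTO or ON         -> looked up via AE_FLASH_MODE_MAP
    //   aeMode == CAM_AE_MODE_ON     -> ON
    //   aeMode == CAM_AE_MODE_OFF    -> OFF
    //   otherwise                    -> error is logged and no entry is added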
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007836 if (mInstantAEC) {
7837        // Increment the frame index count until a bound is reached for instant AEC.
7838 mInstantAecFrameIdxCount++;
7839 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7840 CAM_INTF_META_AEC_INFO, metadata) {
7841 LOGH("ae_params->settled = %d",ae_params->settled);
7842 // If AEC settled, or if number of frames reached bound value,
7843 // should reset instant AEC.
7844 if (ae_params->settled ||
7845 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7846 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7847 mInstantAEC = false;
7848 mResetInstantAEC = true;
7849 mInstantAecFrameIdxCount = 0;
7850 }
7851 }
7852 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007853 resultMetadata = camMetadata.release();
7854 return resultMetadata;
7855}
7856
7857/*===========================================================================
7858 * FUNCTION : dumpMetadataToFile
7859 *
7860 * DESCRIPTION: Dumps tuning metadata to file system
7861 *
7862 * PARAMETERS :
7863 * @meta : tuning metadata
7864 * @dumpFrameCount : current dump frame count
7865 * @enabled : Enable mask
7866 *
7867 *==========================================================================*/
7868void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7869 uint32_t &dumpFrameCount,
7870 bool enabled,
7871 const char *type,
7872 uint32_t frameNumber)
7873{
7874 //Some sanity checks
7875 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7876 LOGE("Tuning sensor data size bigger than expected %d: %d",
7877 meta.tuning_sensor_data_size,
7878 TUNING_SENSOR_DATA_MAX);
7879 return;
7880 }
7881
7882 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7883 LOGE("Tuning VFE data size bigger than expected %d: %d",
7884 meta.tuning_vfe_data_size,
7885 TUNING_VFE_DATA_MAX);
7886 return;
7887 }
7888
7889 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7890 LOGE("Tuning CPP data size bigger than expected %d: %d",
7891 meta.tuning_cpp_data_size,
7892 TUNING_CPP_DATA_MAX);
7893 return;
7894 }
7895
7896 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7897 LOGE("Tuning CAC data size bigger than expected %d: %d",
7898 meta.tuning_cac_data_size,
7899 TUNING_CAC_DATA_MAX);
7900 return;
7901 }
7902 //
7903
7904 if(enabled){
7905 char timeBuf[FILENAME_MAX];
7906 char buf[FILENAME_MAX];
7907 memset(buf, 0, sizeof(buf));
7908 memset(timeBuf, 0, sizeof(timeBuf));
7909 time_t current_time;
7910 struct tm * timeinfo;
7911 time (&current_time);
7912 timeinfo = localtime (&current_time);
7913 if (timeinfo != NULL) {
7914 strftime (timeBuf, sizeof(timeBuf),
7915 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7916 }
7917 String8 filePath(timeBuf);
7918 snprintf(buf,
7919 sizeof(buf),
7920 "%dm_%s_%d.bin",
7921 dumpFrameCount,
7922 type,
7923 frameNumber);
7924 filePath.append(buf);
7925 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7926 if (file_fd >= 0) {
7927 ssize_t written_len = 0;
7928 meta.tuning_data_version = TUNING_DATA_VERSION;
7929 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7930 written_len += write(file_fd, data, sizeof(uint32_t));
7931 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7932 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7933 written_len += write(file_fd, data, sizeof(uint32_t));
7934 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7935 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7936 written_len += write(file_fd, data, sizeof(uint32_t));
7937 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7938 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7939 written_len += write(file_fd, data, sizeof(uint32_t));
7940 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7941 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7942 written_len += write(file_fd, data, sizeof(uint32_t));
7943 meta.tuning_mod3_data_size = 0;
7944 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7945 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7946 written_len += write(file_fd, data, sizeof(uint32_t));
7947 size_t total_size = meta.tuning_sensor_data_size;
7948 data = (void *)((uint8_t *)&meta.data);
7949 written_len += write(file_fd, data, total_size);
7950 total_size = meta.tuning_vfe_data_size;
7951 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7952 written_len += write(file_fd, data, total_size);
7953 total_size = meta.tuning_cpp_data_size;
7954 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7955 written_len += write(file_fd, data, total_size);
7956 total_size = meta.tuning_cac_data_size;
7957 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7958 written_len += write(file_fd, data, total_size);
7959 close(file_fd);
7960 }else {
7961 LOGE("fail to open file for metadata dumping");
7962 }
7963 }
7964}
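
/* Illustrative sketch (not part of the build) of the dump file layout written
 * above: a header of six uint32_t words followed by the tuning payloads at
 * their fixed offsets within meta.data. Offsets and bounds come from the
 * TUNING_*_OFFSET / TUNING_*_MAX definitions.
 *
 *   [ 0] tuning_data_version
 *   [ 4] tuning_sensor_data_size
 *   [ 8] tuning_vfe_data_size
 *   [12] tuning_cpp_data_size
 *   [16] tuning_cac_data_size
 *   [20] tuning_mod3_data_size (always written as 0 here)
 *   [24] sensor payload (meta.data[0], tuning_sensor_data_size bytes)
 *   ...  VFE payload    (meta.data[TUNING_VFE_DATA_OFFSET], tuning_vfe_data_size bytes)
 *   ...  CPP payload    (meta.data[TUNING_CPP_DATA_OFFSET], tuning_cpp_data_size bytes)
 *   ...  CAC payload    (meta.data[TUNING_CAC_DATA_OFFSET], tuning_cac_data_size bytes)
 */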
7965
7966/*===========================================================================
7967 * FUNCTION : cleanAndSortStreamInfo
7968 *
7969 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7970 *              and sort them such that the raw stream is at the end of the list.
7971 *              This is a workaround for a camera daemon constraint.
7972 *
7973 * PARAMETERS : None
7974 *
7975 *==========================================================================*/
7976void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7977{
7978 List<stream_info_t *> newStreamInfo;
7979
7980 /*clean up invalid streams*/
7981 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7982 it != mStreamInfo.end();) {
7983 if(((*it)->status) == INVALID){
7984 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7985 delete channel;
7986 free(*it);
7987 it = mStreamInfo.erase(it);
7988 } else {
7989 it++;
7990 }
7991 }
7992
7993 // Move preview/video/callback/snapshot streams into newList
7994 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7995 it != mStreamInfo.end();) {
7996 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7997 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7998 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7999 newStreamInfo.push_back(*it);
8000 it = mStreamInfo.erase(it);
8001 } else
8002 it++;
8003 }
8004 // Move raw streams into newList
8005 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8006 it != mStreamInfo.end();) {
8007 newStreamInfo.push_back(*it);
8008 it = mStreamInfo.erase(it);
8009 }
8010
8011 mStreamInfo = newStreamInfo;
8012}
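
/* Example (illustrative only): if mStreamInfo held
 *   { RAW16, IMPLEMENTATION_DEFINED (preview), BLOB (snapshot) }
 * then, after dropping INVALID entries, the list is rebuilt above as
 *   { IMPLEMENTATION_DEFINED (preview), BLOB (snapshot), RAW16 }
 * so the raw stream always ends up at the tail of mStreamInfo.
 */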
8013
8014/*===========================================================================
8015 * FUNCTION : extractJpegMetadata
8016 *
8017 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8018 *              JPEG metadata is cached in HAL, and returned as part of capture
8019 * result when metadata is returned from camera daemon.
8020 *
8021 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8022 * @request: capture request
8023 *
8024 *==========================================================================*/
8025void QCamera3HardwareInterface::extractJpegMetadata(
8026 CameraMetadata& jpegMetadata,
8027 const camera3_capture_request_t *request)
8028{
8029 CameraMetadata frame_settings;
8030 frame_settings = request->settings;
8031
8032 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8033 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8034 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8035 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8036
8037 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8038 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8039 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8040 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8041
8042 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8043 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8044 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8045 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8046
8047 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8048 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8049 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8050 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8051
8052 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8053 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8054 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8055 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8056
8057 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8058 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8059 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8060 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8061
8062 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8063 int32_t thumbnail_size[2];
8064 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8065 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8066 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8067 int32_t orientation =
8068 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008069 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008070 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8071 int32_t temp;
8072 temp = thumbnail_size[0];
8073 thumbnail_size[0] = thumbnail_size[1];
8074 thumbnail_size[1] = temp;
8075 }
8076 }
8077 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8078 thumbnail_size,
8079 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8080 }
8081
8082}
8083
8084/*===========================================================================
8085 * FUNCTION : convertToRegions
8086 *
8087 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8088 *
8089 * PARAMETERS :
8090 * @rect : cam_rect_t struct to convert
8091 * @region : int32_t destination array
8092 * @weight : if we are converting from cam_area_t, weight is valid
8093 * else weight = -1
8094 *
8095 *==========================================================================*/
8096void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8097 int32_t *region, int weight)
8098{
8099 region[0] = rect.left;
8100 region[1] = rect.top;
8101 region[2] = rect.left + rect.width;
8102 region[3] = rect.top + rect.height;
8103 if (weight > -1) {
8104 region[4] = weight;
8105 }
8106}
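
/* Worked example (illustrative): a cam_rect_t of {left=100, top=200,
 * width=300, height=150} with weight=1 is converted above into the
 * framework-style array {100, 200, 400, 350, 1}, i.e. {xmin, ymin, xmax,
 * ymax, weight}; with weight = -1 only the first four entries are written.
 */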
8107
8108/*===========================================================================
8109 * FUNCTION : convertFromRegions
8110 *
8111 * DESCRIPTION: helper method to convert a framework region array from
8112 *              frame settings into cam_area_t
8113 *
8114 * PARAMETERS :
8115 *   @roi            : cam_area_t destination to be filled
8116 *   @frame_settings : capture request settings containing the region tag
8117 *   @tag            : metadata tag of the region array to convert
8118 *
8119 *==========================================================================*/
8120void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008121 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008122{
Thierry Strudel3d639192016-09-09 11:52:26 -07008123 int32_t x_min = frame_settings.find(tag).data.i32[0];
8124 int32_t y_min = frame_settings.find(tag).data.i32[1];
8125 int32_t x_max = frame_settings.find(tag).data.i32[2];
8126 int32_t y_max = frame_settings.find(tag).data.i32[3];
8127 roi.weight = frame_settings.find(tag).data.i32[4];
8128 roi.rect.left = x_min;
8129 roi.rect.top = y_min;
8130 roi.rect.width = x_max - x_min;
8131 roi.rect.height = y_max - y_min;
8132}
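
/* Worked example (illustrative): for a request whose region tag holds
 * {100, 200, 400, 350, 1}, the conversion above yields
 * roi.rect = {left=100, top=200, width=300, height=150} and roi.weight = 1,
 * i.e. the inverse of convertToRegions() for the same region.
 */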
8133
8134/*===========================================================================
8135 * FUNCTION : resetIfNeededROI
8136 *
8137 * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
8138 *              returns false if the roi lies completely outside the crop region
8139 *
8140 * PARAMETERS :
8141 * @roi : cam_area_t struct to resize
8142 * @scalerCropRegion : cam_crop_region_t region to compare against
8143 *
8144 *
8145 *==========================================================================*/
8146bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8147 const cam_crop_region_t* scalerCropRegion)
8148{
8149 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8150 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8151 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8152 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8153
8154    /* According to the spec, weight = 0 is used to indicate that the roi needs to
8155     * be disabled. Without this check, the calculations below that validate whether
8156     * the roi is inside the scaler crop region would fail, the roi would not be
8157     * reset, and the algorithm would keep using a stale roi window.
8158     */
8159 if (roi->weight == 0) {
8160 return true;
8161 }
8162
8163 if ((roi_x_max < scalerCropRegion->left) ||
8164        // right edge of roi window is left of scaler crop's left edge
8165        (roi_y_max < scalerCropRegion->top) ||
8166        // bottom edge of roi window is above scaler crop's top edge
8167        (roi->rect.left > crop_x_max) ||
8168        // left edge of roi window is beyond (right of) scaler crop's right edge
8169        (roi->rect.top > crop_y_max)){
8170        // top edge of roi window is beyond (below) scaler crop's bottom edge
8171 return false;
8172 }
8173 if (roi->rect.left < scalerCropRegion->left) {
8174 roi->rect.left = scalerCropRegion->left;
8175 }
8176 if (roi->rect.top < scalerCropRegion->top) {
8177 roi->rect.top = scalerCropRegion->top;
8178 }
8179 if (roi_x_max > crop_x_max) {
8180 roi_x_max = crop_x_max;
8181 }
8182 if (roi_y_max > crop_y_max) {
8183 roi_y_max = crop_y_max;
8184 }
8185 roi->rect.width = roi_x_max - roi->rect.left;
8186 roi->rect.height = roi_y_max - roi->rect.top;
8187 return true;
8188}
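
/* Worked example (illustrative): with a scaler crop region of
 * {left=0, top=0, width=2000, height=1500} and an roi of
 * {left=1800, top=1400, width=400, height=300, weight=1}, none of the
 * rejection checks above hit, so the roi is clamped to
 * {left=1800, top=1400, width=200, height=100} and true is returned.
 * An roi lying completely outside the crop region (e.g. left=2100) returns
 * false so the caller can avoid programming a stale window.
 */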
8189
8190/*===========================================================================
8191 * FUNCTION : convertLandmarks
8192 *
8193 * DESCRIPTION: helper method to extract the landmarks from face detection info
8194 *
8195 * PARAMETERS :
8196 * @landmark_data : input landmark data to be converted
8197 * @landmarks : int32_t destination array
8198 *
8199 *
8200 *==========================================================================*/
8201void QCamera3HardwareInterface::convertLandmarks(
8202 cam_face_landmarks_info_t landmark_data,
8203 int32_t *landmarks)
8204{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008205 if (landmark_data.is_left_eye_valid) {
8206 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8207 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8208 } else {
8209 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8210 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8211 }
8212
8213 if (landmark_data.is_right_eye_valid) {
8214 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8215 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8216 } else {
8217 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8218 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8219 }
8220
8221 if (landmark_data.is_mouth_valid) {
8222 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8223 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8224 } else {
8225 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8226 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8227 }
8228}
8229
8230/*===========================================================================
8231 * FUNCTION : setInvalidLandmarks
8232 *
8233 * DESCRIPTION: helper method to set invalid landmarks
8234 *
8235 * PARAMETERS :
8236 * @landmarks : int32_t destination array
8237 *
8238 *
8239 *==========================================================================*/
8240void QCamera3HardwareInterface::setInvalidLandmarks(
8241 int32_t *landmarks)
8242{
8243 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8244 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8245 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8246 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8247 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8248 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008249}
8250
8251#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008252
8253/*===========================================================================
8254 * FUNCTION : getCapabilities
8255 *
8256 * DESCRIPTION: query camera capability from back-end
8257 *
8258 * PARAMETERS :
8259 * @ops : mm-interface ops structure
8260 * @cam_handle : camera handle for which we need capability
8261 *
8262 * RETURN : ptr type of capability structure
8263 * capability for success
8264 * NULL for failure
8265 *==========================================================================*/
8266cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8267 uint32_t cam_handle)
8268{
8269 int rc = NO_ERROR;
8270 QCamera3HeapMemory *capabilityHeap = NULL;
8271 cam_capability_t *cap_ptr = NULL;
8272
8273 if (ops == NULL) {
8274 LOGE("Invalid arguments");
8275 return NULL;
8276 }
8277
8278 capabilityHeap = new QCamera3HeapMemory(1);
8279 if (capabilityHeap == NULL) {
8280 LOGE("creation of capabilityHeap failed");
8281 return NULL;
8282 }
8283
8284 /* Allocate memory for capability buffer */
8285 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8286 if(rc != OK) {
8287        LOGE("No memory for capability");
8288 goto allocate_failed;
8289 }
8290
8291 /* Map memory for capability buffer */
8292 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8293
8294 rc = ops->map_buf(cam_handle,
8295 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8296 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8297 if(rc < 0) {
8298 LOGE("failed to map capability buffer");
8299 rc = FAILED_TRANSACTION;
8300 goto map_failed;
8301 }
8302
8303 /* Query Capability */
8304 rc = ops->query_capability(cam_handle);
8305 if(rc < 0) {
8306 LOGE("failed to query capability");
8307 rc = FAILED_TRANSACTION;
8308 goto query_failed;
8309 }
8310
8311 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8312 if (cap_ptr == NULL) {
8313 LOGE("out of memory");
8314 rc = NO_MEMORY;
8315 goto query_failed;
8316 }
8317
8318 memset(cap_ptr, 0, sizeof(cam_capability_t));
8319 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8320
8321 int index;
8322 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8323 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8324 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8325 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8326 }
8327
8328query_failed:
8329 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8330map_failed:
8331 capabilityHeap->deallocate();
8332allocate_failed:
8333 delete capabilityHeap;
8334
8335 if (rc != NO_ERROR) {
8336 return NULL;
8337 } else {
8338 return cap_ptr;
8339 }
8340}
8341
Thierry Strudel3d639192016-09-09 11:52:26 -07008342/*===========================================================================
8343 * FUNCTION : initCapabilities
8344 *
8345 * DESCRIPTION: initialize camera capabilities in static data struct
8346 *
8347 * PARAMETERS :
8348 * @cameraId : camera Id
8349 *
8350 * RETURN : int32_t type of status
8351 * NO_ERROR -- success
8352 * none-zero failure code
8353 *==========================================================================*/
8354int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8355{
8356 int rc = 0;
8357 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008358 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008359
8360 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8361 if (rc) {
8362 LOGE("camera_open failed. rc = %d", rc);
8363 goto open_failed;
8364 }
8365 if (!cameraHandle) {
8366 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8367 goto open_failed;
8368 }
8369
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008370 handle = get_main_camera_handle(cameraHandle->camera_handle);
8371 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8372 if (gCamCapability[cameraId] == NULL) {
8373 rc = FAILED_TRANSACTION;
8374 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008375 }
8376
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008377 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008378 if (is_dual_camera_by_idx(cameraId)) {
8379 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8380 gCamCapability[cameraId]->aux_cam_cap =
8381 getCapabilities(cameraHandle->ops, handle);
8382 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8383 rc = FAILED_TRANSACTION;
8384 free(gCamCapability[cameraId]);
8385 goto failed_op;
8386 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008387
8388 // Copy the main camera capability to main_cam_cap struct
8389 gCamCapability[cameraId]->main_cam_cap =
8390 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8391 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8392 LOGE("out of memory");
8393 rc = NO_MEMORY;
8394 goto failed_op;
8395 }
8396 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8397 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008398 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008399failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008400 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8401 cameraHandle = NULL;
8402open_failed:
8403 return rc;
8404}
8405
8406/*==========================================================================
8407 * FUNCTION   : get3AVersion
8408 *
8409 * DESCRIPTION: get the Q3A S/W version
8410 *
8411 * PARAMETERS :
8412 * @sw_version: Reference of Q3A structure which will hold version info upon
8413 * return
8414 *
8415 * RETURN : None
8416 *
8417 *==========================================================================*/
8418void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8419{
8420 if(gCamCapability[mCameraId])
8421 sw_version = gCamCapability[mCameraId]->q3a_version;
8422 else
8423 LOGE("Capability structure NULL!");
8424}
8425
8426
8427/*===========================================================================
8428 * FUNCTION : initParameters
8429 *
8430 * DESCRIPTION: initialize camera parameters
8431 *
8432 * PARAMETERS :
8433 *
8434 * RETURN : int32_t type of status
8435 * NO_ERROR -- success
8436 * none-zero failure code
8437 *==========================================================================*/
8438int QCamera3HardwareInterface::initParameters()
8439{
8440 int rc = 0;
8441
8442 //Allocate Set Param Buffer
8443 mParamHeap = new QCamera3HeapMemory(1);
8444 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8445 if(rc != OK) {
8446 rc = NO_MEMORY;
8447 LOGE("Failed to allocate SETPARM Heap memory");
8448 delete mParamHeap;
8449 mParamHeap = NULL;
8450 return rc;
8451 }
8452
8453 //Map memory for parameters buffer
8454 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8455 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8456 mParamHeap->getFd(0),
8457 sizeof(metadata_buffer_t),
8458 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8459 if(rc < 0) {
8460 LOGE("failed to map SETPARM buffer");
8461 rc = FAILED_TRANSACTION;
8462 mParamHeap->deallocate();
8463 delete mParamHeap;
8464 mParamHeap = NULL;
8465 return rc;
8466 }
8467
8468 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8469
8470 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8471 return rc;
8472}
8473
8474/*===========================================================================
8475 * FUNCTION : deinitParameters
8476 *
8477 * DESCRIPTION: de-initialize camera parameters
8478 *
8479 * PARAMETERS :
8480 *
8481 * RETURN : NONE
8482 *==========================================================================*/
8483void QCamera3HardwareInterface::deinitParameters()
8484{
8485 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8486 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8487
8488 mParamHeap->deallocate();
8489 delete mParamHeap;
8490 mParamHeap = NULL;
8491
8492 mParameters = NULL;
8493
8494 free(mPrevParameters);
8495 mPrevParameters = NULL;
8496}
8497
8498/*===========================================================================
8499 * FUNCTION : calcMaxJpegSize
8500 *
8501 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8502 *
8503 * PARAMETERS :
8504 *
8505 * RETURN : max_jpeg_size
8506 *==========================================================================*/
8507size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8508{
8509 size_t max_jpeg_size = 0;
8510 size_t temp_width, temp_height;
8511 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8512 MAX_SIZES_CNT);
8513 for (size_t i = 0; i < count; i++) {
8514 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8515 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8516 if (temp_width * temp_height > max_jpeg_size ) {
8517 max_jpeg_size = temp_width * temp_height;
8518 }
8519 }
8520 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8521 return max_jpeg_size;
8522}
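
/* Worked example (illustrative, hypothetical sensor): if the largest entry in
 * picture_sizes_tbl is 4000x3000, the value computed above is
 * 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t), i.e. the YUV420 frame
 * size used as an upper bound for the compressed JPEG plus the trailing
 * jpeg blob header.
 */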
8523
8524/*===========================================================================
8525 * FUNCTION : getMaxRawSize
8526 *
8527 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8528 *
8529 * PARAMETERS :
8530 *
8531 * RETURN : Largest supported Raw Dimension
8532 *==========================================================================*/
8533cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8534{
8535 int max_width = 0;
8536 cam_dimension_t maxRawSize;
8537
8538 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8539 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8540 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8541 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8542 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8543 }
8544 }
8545 return maxRawSize;
8546}
8547
8548
8549/*===========================================================================
8550 * FUNCTION : calcMaxJpegDim
8551 *
8552 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8553 *
8554 * PARAMETERS :
8555 *
8556 * RETURN : max_jpeg_dim
8557 *==========================================================================*/
8558cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8559{
8560 cam_dimension_t max_jpeg_dim;
8561 cam_dimension_t curr_jpeg_dim;
8562 max_jpeg_dim.width = 0;
8563 max_jpeg_dim.height = 0;
8564 curr_jpeg_dim.width = 0;
8565 curr_jpeg_dim.height = 0;
8566 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8567 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8568 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8569 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8570 max_jpeg_dim.width * max_jpeg_dim.height ) {
8571 max_jpeg_dim.width = curr_jpeg_dim.width;
8572 max_jpeg_dim.height = curr_jpeg_dim.height;
8573 }
8574 }
8575 return max_jpeg_dim;
8576}
8577
8578/*===========================================================================
8579 * FUNCTION : addStreamConfig
8580 *
8581 * DESCRIPTION: adds the stream configuration to the array
8582 *
8583 * PARAMETERS :
8584 * @available_stream_configs : pointer to stream configuration array
8585 * @scalar_format : scalar format
8586 * @dim : configuration dimension
8587 * @config_type : input or output configuration type
8588 *
8589 * RETURN : NONE
8590 *==========================================================================*/
8591void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8592 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8593{
8594 available_stream_configs.add(scalar_format);
8595 available_stream_configs.add(dim.width);
8596 available_stream_configs.add(dim.height);
8597 available_stream_configs.add(config_type);
8598}
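
/* Illustrative: each call above appends one flattened 4-tuple to the stream
 * configuration vector. For example, a 1920x1080 implementation-defined
 * output adds the entries
 *   { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1920, 1080,
 *     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
 * matching the (format, width, height, direction) layout the framework
 * expects for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 */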
8599
8600/*===========================================================================
8601 * FUNCTION   : supportBurstCapture
8602 *
8603 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8604 *
8605 * PARAMETERS :
8606 * @cameraId : camera Id
8607 *
8608 * RETURN : true if camera supports BURST_CAPTURE
8609 * false otherwise
8610 *==========================================================================*/
8611bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8612{
8613 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8614 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8615 const int32_t highResWidth = 3264;
8616 const int32_t highResHeight = 2448;
8617
8618 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8619 // Maximum resolution images cannot be captured at >= 10fps
8620 // -> not supporting BURST_CAPTURE
8621 return false;
8622 }
8623
8624 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8625 // Maximum resolution images can be captured at >= 20fps
8626 // --> supporting BURST_CAPTURE
8627 return true;
8628 }
8629
8630 // Find the smallest highRes resolution, or largest resolution if there is none
8631 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8632 MAX_SIZES_CNT);
8633 size_t highRes = 0;
8634 while ((highRes + 1 < totalCnt) &&
8635 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8636 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8637 highResWidth * highResHeight)) {
8638 highRes++;
8639 }
8640 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8641 return true;
8642 } else {
8643 return false;
8644 }
8645}
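
/* Worked example (illustrative, hypothetical durations): with
 * picture_min_duration[0] = 80 ms neither early return above triggers
 * (80 ms is not > 100 ms and not <= 50 ms), so the loop walks the
 * descending picture_sizes_tbl to the smallest size still >= 3264x2448 and
 * BURST_CAPTURE is advertised only if that entry's min duration is <= 50 ms,
 * i.e. at least 20 fps is sustainable at roughly 8MP.
 */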
8646
8647/*===========================================================================
8648 * FUNCTION : initStaticMetadata
8649 *
8650 * DESCRIPTION: initialize the static metadata
8651 *
8652 * PARAMETERS :
8653 * @cameraId : camera Id
8654 *
8655 * RETURN : int32_t type of status
8656 * 0 -- success
8657 * non-zero failure code
8658 *==========================================================================*/
8659int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8660{
8661 int rc = 0;
8662 CameraMetadata staticInfo;
8663 size_t count = 0;
8664 bool limitedDevice = false;
8665 char prop[PROPERTY_VALUE_MAX];
8666 bool supportBurst = false;
8667
8668 supportBurst = supportBurstCapture(cameraId);
8669
8670    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
8671     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
8672     * advertised as a LIMITED device */
8673 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8674 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8675 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8676 !supportBurst;
8677
8678 uint8_t supportedHwLvl = limitedDevice ?
8679 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008680#ifndef USE_HAL_3_3
8681 // LEVEL_3 - This device will support level 3.
8682 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8683#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008684 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008685#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008686
8687 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8688 &supportedHwLvl, 1);
8689
8690 bool facingBack = false;
8691 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8692 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8693 facingBack = true;
8694 }
8695 /*HAL 3 only*/
8696 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8697 &gCamCapability[cameraId]->min_focus_distance, 1);
8698
8699 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8700 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8701
8702 /*should be using focal lengths but sensor doesn't provide that info now*/
8703 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8704 &gCamCapability[cameraId]->focal_length,
8705 1);
8706
8707 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8708 gCamCapability[cameraId]->apertures,
8709 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8710
8711 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8712 gCamCapability[cameraId]->filter_densities,
8713 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8714
8715
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008716 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8717 size_t mode_count =
8718 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8719 for (size_t i = 0; i < mode_count; i++) {
8720 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8721 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008722 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008723 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008724
8725 int32_t lens_shading_map_size[] = {
8726 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8727 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8728 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8729 lens_shading_map_size,
8730 sizeof(lens_shading_map_size)/sizeof(int32_t));
8731
8732 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8733 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8734
8735 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8736 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8737
8738 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8739 &gCamCapability[cameraId]->max_frame_duration, 1);
8740
8741 camera_metadata_rational baseGainFactor = {
8742 gCamCapability[cameraId]->base_gain_factor.numerator,
8743 gCamCapability[cameraId]->base_gain_factor.denominator};
8744 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8745 &baseGainFactor, 1);
8746
8747 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8748 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8749
8750 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8751 gCamCapability[cameraId]->pixel_array_size.height};
8752 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8753 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8754
8755 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8756 gCamCapability[cameraId]->active_array_size.top,
8757 gCamCapability[cameraId]->active_array_size.width,
8758 gCamCapability[cameraId]->active_array_size.height};
8759 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8760 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8761
8762 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8763 &gCamCapability[cameraId]->white_level, 1);
8764
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008765 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8766 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8767 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008768 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008769 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008770
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008771#ifndef USE_HAL_3_3
8772 bool hasBlackRegions = false;
8773 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8774 LOGW("black_region_count: %d is bounded to %d",
8775 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8776 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8777 }
8778 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8779 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8780 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8781 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8782 }
8783 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8784 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8785 hasBlackRegions = true;
8786 }
8787#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008788 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8789 &gCamCapability[cameraId]->flash_charge_duration, 1);
8790
8791 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8792 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8793
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008794 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8795 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8796 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008797 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8798 &timestampSource, 1);
8799
Thierry Strudel54dc9782017-02-15 12:12:10 -08008800 //update histogram vendor data
8801 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008802 &gCamCapability[cameraId]->histogram_size, 1);
8803
Thierry Strudel54dc9782017-02-15 12:12:10 -08008804 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008805 &gCamCapability[cameraId]->max_histogram_count, 1);
8806
Shuzhen Wang14415f52016-11-16 18:26:18 -08008807 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
8808 //so that app can request fewer number of bins than the maximum supported.
8809 std::vector<int32_t> histBins;
8810 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
8811 histBins.push_back(maxHistBins);
8812 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
8813 (maxHistBins & 0x1) == 0) {
8814 histBins.push_back(maxHistBins >> 1);
8815 maxHistBins >>= 1;
8816 }
8817 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
8818 histBins.data(), histBins.size());
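    /* Illustrative: with max_histogram_count = 256 and, hypothetically,
     * MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the list built above is
     * {256, 128, 64, 32}: the maximum followed by successive halvings while
     * the current value is even and the half is not below the minimum. */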
8819
Thierry Strudel3d639192016-09-09 11:52:26 -07008820 int32_t sharpness_map_size[] = {
8821 gCamCapability[cameraId]->sharpness_map_size.width,
8822 gCamCapability[cameraId]->sharpness_map_size.height};
8823
8824 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8825 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8826
8827 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8828 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8829
8830 int32_t scalar_formats[] = {
8831 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8832 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8833 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8834 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8835 HAL_PIXEL_FORMAT_RAW10,
8836 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8837 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8838 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8839 scalar_formats,
8840 scalar_formats_count);
8841
8842 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8843 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8844 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8845 count, MAX_SIZES_CNT, available_processed_sizes);
8846 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8847 available_processed_sizes, count * 2);
8848
8849 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8850 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8851 makeTable(gCamCapability[cameraId]->raw_dim,
8852 count, MAX_SIZES_CNT, available_raw_sizes);
8853 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8854 available_raw_sizes, count * 2);
8855
8856 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8857 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8858 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8859 count, MAX_SIZES_CNT, available_fps_ranges);
8860 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8861 available_fps_ranges, count * 2);
8862
8863 camera_metadata_rational exposureCompensationStep = {
8864 gCamCapability[cameraId]->exp_compensation_step.numerator,
8865 gCamCapability[cameraId]->exp_compensation_step.denominator};
8866 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8867 &exposureCompensationStep, 1);
8868
8869 Vector<uint8_t> availableVstabModes;
8870 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8871 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008872 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008873 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008874 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008875 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008876 count = IS_TYPE_MAX;
8877 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8878 for (size_t i = 0; i < count; i++) {
8879 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8880 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8881 eisSupported = true;
8882 break;
8883 }
8884 }
8885 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008886 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8887 }
8888 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8889 availableVstabModes.array(), availableVstabModes.size());
8890
8891 /*HAL 1 and HAL 3 common*/
8892 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8893 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8894 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8895 float maxZoom = maxZoomStep/minZoomStep;
8896 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8897 &maxZoom, 1);
8898
8899 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8900 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8901
8902 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8903 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8904 max3aRegions[2] = 0; /* AF not supported */
8905 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8906 max3aRegions, 3);
8907
8908 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8909 memset(prop, 0, sizeof(prop));
8910 property_get("persist.camera.facedetect", prop, "1");
8911 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8912 LOGD("Support face detection mode: %d",
8913 supportedFaceDetectMode);
8914
8915 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008916    /* supported face detect mode should be OFF if the max number of faces is 0 */
8917 if (maxFaces <= 0) {
8918 supportedFaceDetectMode = 0;
8919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008920 Vector<uint8_t> availableFaceDetectModes;
8921 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8922 if (supportedFaceDetectMode == 1) {
8923 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8924 } else if (supportedFaceDetectMode == 2) {
8925 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8926 } else if (supportedFaceDetectMode == 3) {
8927 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8928 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8929 } else {
8930 maxFaces = 0;
8931 }
8932 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8933 availableFaceDetectModes.array(),
8934 availableFaceDetectModes.size());
8935 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8936 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008937 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8938 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8939 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008940
Emilian Peev7650c122017-01-19 08:24:33 -08008941#ifdef SUPPORT_DEPTH_DATA
Emilian Peev0ce959f2017-03-07 16:49:49 +00008942 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8943 //TODO: Update depth size accordingly, currently we use active array
8944 // as reference.
8945 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8946 int32_t depthHeight =
8947 gCamCapability[cameraId]->active_array_size.height;
8948 //As per spec. depth cloud should be sample count / 16
8949 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8950 assert(0 < depthSamplesCount);
8951 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
8952 &depthSamplesCount, 1);
Emilian Peev7650c122017-01-19 08:24:33 -08008953
Emilian Peev0ce959f2017-03-07 16:49:49 +00008954 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8955 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8956 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8957 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008958
Emilian Peev0ce959f2017-03-07 16:49:49 +00008959 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8960 1, 1 };
8961 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8962 depthMinDuration,
8963 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008964
Emilian Peev0ce959f2017-03-07 16:49:49 +00008965 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB,
8966 depthSamplesCount, 1, 0 };
8967 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8968 depthStallDuration,
8969 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008970
Emilian Peev0ce959f2017-03-07 16:49:49 +00008971 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8972 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8973 }
Emilian Peev7650c122017-01-19 08:24:33 -08008974#endif
8975
Thierry Strudel3d639192016-09-09 11:52:26 -07008976 int32_t exposureCompensationRange[] = {
8977 gCamCapability[cameraId]->exposure_compensation_min,
8978 gCamCapability[cameraId]->exposure_compensation_max};
8979 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8980 exposureCompensationRange,
8981 sizeof(exposureCompensationRange)/sizeof(int32_t));
8982
8983 uint8_t lensFacing = (facingBack) ?
8984 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8985 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8986
8987 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8988 available_thumbnail_sizes,
8989 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8990
8991 /*all sizes will be clubbed into this tag*/
8992 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8993 /*android.scaler.availableStreamConfigurations*/
8994 Vector<int32_t> available_stream_configs;
8995 cam_dimension_t active_array_dim;
8996 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8997 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08008998
8999    /* Advertise the list of supported input dimensions based on the property below.
9000      By default, all sizes up to 5MP will be advertised.
9001 Note that the setprop resolution format should be WxH.
9002 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9003 To list all supported sizes, setprop needs to be set with "0x0" */
9004 cam_dimension_t minInputSize = {2592,1944}; //5MP
9005 memset(prop, 0, sizeof(prop));
9006 property_get("persist.camera.input.minsize", prop, "2592x1944");
9007 if (strlen(prop) > 0) {
9008 char *saveptr = NULL;
9009 char *token = strtok_r(prop, "x", &saveptr);
9010 if (token != NULL) {
9011 minInputSize.width = atoi(token);
9012 }
9013 token = strtok_r(NULL, "x", &saveptr);
9014 if (token != NULL) {
9015 minInputSize.height = atoi(token);
9016 }
9017 }
9018
Thierry Strudel3d639192016-09-09 11:52:26 -07009019    /* Add input/output stream configurations for each scaler format */
9020 for (size_t j = 0; j < scalar_formats_count; j++) {
9021 switch (scalar_formats[j]) {
9022 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9023 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9024 case HAL_PIXEL_FORMAT_RAW10:
9025 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9026 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9027 addStreamConfig(available_stream_configs, scalar_formats[j],
9028 gCamCapability[cameraId]->raw_dim[i],
9029 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9030 }
9031 break;
9032 case HAL_PIXEL_FORMAT_BLOB:
9033 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9034 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9035 addStreamConfig(available_stream_configs, scalar_formats[j],
9036 gCamCapability[cameraId]->picture_sizes_tbl[i],
9037 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9038 }
9039 break;
9040 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9041 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9042 default:
9043 cam_dimension_t largest_picture_size;
9044 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9045 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9046 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9047 addStreamConfig(available_stream_configs, scalar_formats[j],
9048 gCamCapability[cameraId]->picture_sizes_tbl[i],
9049 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009050            /* For the below 2 formats we also support input streams for reprocessing; advertise those */
9051 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9052 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9053 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9054 >= minInputSize.width) || (gCamCapability[cameraId]->
9055 picture_sizes_tbl[i].height >= minInputSize.height)) {
9056 addStreamConfig(available_stream_configs, scalar_formats[j],
9057 gCamCapability[cameraId]->picture_sizes_tbl[i],
9058 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9059 }
9060 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009061 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009062
Thierry Strudel3d639192016-09-09 11:52:26 -07009063 break;
9064 }
9065 }
9066
9067 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9068 available_stream_configs.array(), available_stream_configs.size());
9069 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9070 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9071
9072 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9073 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9074
9075 /* android.scaler.availableMinFrameDurations */
9076 Vector<int64_t> available_min_durations;
9077 for (size_t j = 0; j < scalar_formats_count; j++) {
9078 switch (scalar_formats[j]) {
9079 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9080 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9081 case HAL_PIXEL_FORMAT_RAW10:
9082 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9083 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9084 available_min_durations.add(scalar_formats[j]);
9085 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9086 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9087 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9088 }
9089 break;
9090 default:
9091 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9092 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9093 available_min_durations.add(scalar_formats[j]);
9094 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9095 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9096 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9097 }
9098 break;
9099 }
9100 }
9101 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9102 available_min_durations.array(), available_min_durations.size());
9103
9104 Vector<int32_t> available_hfr_configs;
9105 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9106 int32_t fps = 0;
9107 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9108 case CAM_HFR_MODE_60FPS:
9109 fps = 60;
9110 break;
9111 case CAM_HFR_MODE_90FPS:
9112 fps = 90;
9113 break;
9114 case CAM_HFR_MODE_120FPS:
9115 fps = 120;
9116 break;
9117 case CAM_HFR_MODE_150FPS:
9118 fps = 150;
9119 break;
9120 case CAM_HFR_MODE_180FPS:
9121 fps = 180;
9122 break;
9123 case CAM_HFR_MODE_210FPS:
9124 fps = 210;
9125 break;
9126 case CAM_HFR_MODE_240FPS:
9127 fps = 240;
9128 break;
9129 case CAM_HFR_MODE_480FPS:
9130 fps = 480;
9131 break;
9132 case CAM_HFR_MODE_OFF:
9133 case CAM_HFR_MODE_MAX:
9134 default:
9135 break;
9136 }
9137
9138 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9139 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9140 /* For each HFR frame rate, need to advertise one variable fps range
9141 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9142 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9143 * set by the app. When video recording is started, [120, 120] is
9144 * set. This way sensor configuration does not change when recording
9145 * is started */
9146
9147 /* (width, height, fps_min, fps_max, batch_size_max) */
9148 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9149 j < MAX_SIZES_CNT; j++) {
9150 available_hfr_configs.add(
9151 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9152 available_hfr_configs.add(
9153 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9154 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9155 available_hfr_configs.add(fps);
9156 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9157
9158 /* (width, height, fps_min, fps_max, batch_size_max) */
9159 available_hfr_configs.add(
9160 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9161 available_hfr_configs.add(
9162 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9163 available_hfr_configs.add(fps);
9164 available_hfr_configs.add(fps);
9165 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9166 }
9167 }
9168 }
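    /* Illustrative: for a 1920x1080 entry in hfr_tbl with CAM_HFR_MODE_120FPS,
     * the loop above appends two (width, height, fps_min, fps_max,
     * batch_size_max) tuples: {1920, 1080, 30, 120, 4} for camcorder preview
     * and {1920, 1080, 120, 120, 4} for recording, assuming
     * PREVIEW_FPS_FOR_HFR is 30 as the example in the comment above implies. */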
9169 //Advertise HFR capability only if the property is set
9170 memset(prop, 0, sizeof(prop));
9171 property_get("persist.camera.hal3hfr.enable", prop, "1");
9172 uint8_t hfrEnable = (uint8_t)atoi(prop);
9173
9174 if(hfrEnable && available_hfr_configs.array()) {
9175 staticInfo.update(
9176 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9177 available_hfr_configs.array(), available_hfr_configs.size());
9178 }
9179
9180 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9181 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9182 &max_jpeg_size, 1);
9183
9184 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9185 size_t size = 0;
9186 count = CAM_EFFECT_MODE_MAX;
9187 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9188 for (size_t i = 0; i < count; i++) {
9189 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9190 gCamCapability[cameraId]->supported_effects[i]);
9191 if (NAME_NOT_FOUND != val) {
9192 avail_effects[size] = (uint8_t)val;
9193 size++;
9194 }
9195 }
9196 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9197 avail_effects,
9198 size);
9199
9200 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9201 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9202 size_t supported_scene_modes_cnt = 0;
9203 count = CAM_SCENE_MODE_MAX;
9204 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9205 for (size_t i = 0; i < count; i++) {
9206 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9207 CAM_SCENE_MODE_OFF) {
9208 int val = lookupFwkName(SCENE_MODES_MAP,
9209 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9210 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009211
Thierry Strudel3d639192016-09-09 11:52:26 -07009212 if (NAME_NOT_FOUND != val) {
9213 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9214 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9215 supported_scene_modes_cnt++;
9216 }
9217 }
9218 }
9219 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9220 avail_scene_modes,
9221 supported_scene_modes_cnt);
9222
9223 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9224 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9225 supported_scene_modes_cnt,
9226 CAM_SCENE_MODE_MAX,
9227 scene_mode_overrides,
9228 supported_indexes,
9229 cameraId);
9230
9231 if (supported_scene_modes_cnt == 0) {
9232 supported_scene_modes_cnt = 1;
9233 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9234 }
9235
9236 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9237 scene_mode_overrides, supported_scene_modes_cnt * 3);
9238
9239 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9240 ANDROID_CONTROL_MODE_AUTO,
9241 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9242 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9243 available_control_modes,
9244 3);
9245
9246 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9247 size = 0;
9248 count = CAM_ANTIBANDING_MODE_MAX;
9249 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9250 for (size_t i = 0; i < count; i++) {
9251 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9252 gCamCapability[cameraId]->supported_antibandings[i]);
9253 if (NAME_NOT_FOUND != val) {
9254 avail_antibanding_modes[size] = (uint8_t)val;
9255 size++;
9256 }
9257
9258 }
9259 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9260 avail_antibanding_modes,
9261 size);
9262
9263 uint8_t avail_abberation_modes[] = {
9264 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9265 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9266 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9267 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9268 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9269 if (0 == count) {
9270        // If no aberration correction modes are available for a device, advertise only the OFF mode
9271 size = 1;
9272 } else {
9273        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9274        // So, advertise all 3 modes if at least one mode is supported, as per the
9275        // new M requirement
9276 size = 3;
9277 }
9278 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9279 avail_abberation_modes,
9280 size);
9281
9282 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9283 size = 0;
9284 count = CAM_FOCUS_MODE_MAX;
9285 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9286 for (size_t i = 0; i < count; i++) {
9287 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9288 gCamCapability[cameraId]->supported_focus_modes[i]);
9289 if (NAME_NOT_FOUND != val) {
9290 avail_af_modes[size] = (uint8_t)val;
9291 size++;
9292 }
9293 }
9294 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9295 avail_af_modes,
9296 size);
9297
9298 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9299 size = 0;
9300 count = CAM_WB_MODE_MAX;
9301 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9302 for (size_t i = 0; i < count; i++) {
9303 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9304 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9305 gCamCapability[cameraId]->supported_white_balances[i]);
9306 if (NAME_NOT_FOUND != val) {
9307 avail_awb_modes[size] = (uint8_t)val;
9308 size++;
9309 }
9310 }
9311 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9312 avail_awb_modes,
9313 size);
9314
9315 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9316 count = CAM_FLASH_FIRING_LEVEL_MAX;
9317 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9318 count);
9319 for (size_t i = 0; i < count; i++) {
9320 available_flash_levels[i] =
9321 gCamCapability[cameraId]->supported_firing_levels[i];
9322 }
9323 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9324 available_flash_levels, count);
9325
9326 uint8_t flashAvailable;
9327 if (gCamCapability[cameraId]->flash_available)
9328 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9329 else
9330 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9331 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9332 &flashAvailable, 1);
9333
9334 Vector<uint8_t> avail_ae_modes;
9335 count = CAM_AE_MODE_MAX;
9336 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9337 for (size_t i = 0; i < count; i++) {
9338 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9339 }
9340 if (flashAvailable) {
9341 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9342 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009343 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009344 }
9345 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9346 avail_ae_modes.array(),
9347 avail_ae_modes.size());
9348
9349 int32_t sensitivity_range[2];
9350 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9351 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9352 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9353 sensitivity_range,
9354 sizeof(sensitivity_range) / sizeof(int32_t));
9355
9356 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9357 &gCamCapability[cameraId]->max_analog_sensitivity,
9358 1);
9359
9360 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9361 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9362 &sensor_orientation,
9363 1);
9364
9365 int32_t max_output_streams[] = {
9366 MAX_STALLING_STREAMS,
9367 MAX_PROCESSED_STREAMS,
9368 MAX_RAW_STREAMS};
9369 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9370 max_output_streams,
9371 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9372
9373 uint8_t avail_leds = 0;
9374 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9375 &avail_leds, 0);
9376
9377 uint8_t focus_dist_calibrated;
9378 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9379 gCamCapability[cameraId]->focus_dist_calibrated);
9380 if (NAME_NOT_FOUND != val) {
9381 focus_dist_calibrated = (uint8_t)val;
9382 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9383 &focus_dist_calibrated, 1);
9384 }
9385
9386 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9387 size = 0;
9388 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9389 MAX_TEST_PATTERN_CNT);
9390 for (size_t i = 0; i < count; i++) {
9391 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9392 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9393 if (NAME_NOT_FOUND != testpatternMode) {
9394 avail_testpattern_modes[size] = testpatternMode;
9395 size++;
9396 }
9397 }
9398 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9399 avail_testpattern_modes,
9400 size);
9401
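    // The advertised pipeline depth is the maximum number of in-flight requests
    // plus the empty-pipeline and frame-skip delays.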
9402 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9403 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9404 &max_pipeline_depth,
9405 1);
9406
9407 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9408 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9409 &partial_result_count,
9410 1);
9411
9412 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9413 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9414
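    // Build the capability list: backward compatibility, manual sensor/post-processing,
    // sensor-settings read-out and (private/YUV) reprocessing are always advertised;
    // BURST_CAPTURE, constrained high-speed video and RAW are added conditionally below.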
9415 Vector<uint8_t> available_capabilities;
9416 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9417 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9418 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9419 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9420 if (supportBurst) {
9421 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9422 }
9423 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9424 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9425 if (hfrEnable && available_hfr_configs.array()) {
9426 available_capabilities.add(
9427 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9428 }
9429
9430 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9431 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9432 }
9433 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9434 available_capabilities.array(),
9435 available_capabilities.size());
9436
9437    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9438    //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9439 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9440 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9441
9442 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9443 &aeLockAvailable, 1);
9444
9445    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9446    //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9447 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9448 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9449
9450 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9451 &awbLockAvailable, 1);
9452
9453 int32_t max_input_streams = 1;
9454 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9455 &max_input_streams,
9456 1);
9457
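    // The map below states that both IMPLEMENTATION_DEFINED and YCbCr_420_888 input
    // streams can be reprocessed into BLOB (JPEG) or YCbCr_420_888 outputs.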
9458 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9459 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9460 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9461 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9462 HAL_PIXEL_FORMAT_YCbCr_420_888};
9463 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9464 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9465
9466 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9467 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9468 &max_latency,
9469 1);
9470
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009471#ifndef USE_HAL_3_3
9472 int32_t isp_sensitivity_range[2];
9473 isp_sensitivity_range[0] =
9474 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9475 isp_sensitivity_range[1] =
9476 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9477 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9478 isp_sensitivity_range,
9479 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9480#endif
9481
Thierry Strudel3d639192016-09-09 11:52:26 -07009482 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9483 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9484 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9485 available_hot_pixel_modes,
9486 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9487
9488 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9489 ANDROID_SHADING_MODE_FAST,
9490 ANDROID_SHADING_MODE_HIGH_QUALITY};
9491 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9492 available_shading_modes,
9493 3);
9494
9495 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9496 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9497 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9498 available_lens_shading_map_modes,
9499 2);
9500
9501 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9502 ANDROID_EDGE_MODE_FAST,
9503 ANDROID_EDGE_MODE_HIGH_QUALITY,
9504 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9505 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9506 available_edge_modes,
9507 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9508
9509 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9510 ANDROID_NOISE_REDUCTION_MODE_FAST,
9511 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9512 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9513 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9514 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9515 available_noise_red_modes,
9516 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9517
9518 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9519 ANDROID_TONEMAP_MODE_FAST,
9520 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9521 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9522 available_tonemap_modes,
9523 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9524
9525 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9526 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9527 available_hot_pixel_map_modes,
9528 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9529
9530 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9531 gCamCapability[cameraId]->reference_illuminant1);
9532 if (NAME_NOT_FOUND != val) {
9533 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9534 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9535 }
9536
9537 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9538 gCamCapability[cameraId]->reference_illuminant2);
9539 if (NAME_NOT_FOUND != val) {
9540 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9541 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9542 }
9543
9544 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9545 (void *)gCamCapability[cameraId]->forward_matrix1,
9546 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9547
9548 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9549 (void *)gCamCapability[cameraId]->forward_matrix2,
9550 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9551
9552 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9553 (void *)gCamCapability[cameraId]->color_transform1,
9554 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9555
9556 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9557 (void *)gCamCapability[cameraId]->color_transform2,
9558 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9559
9560 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9561 (void *)gCamCapability[cameraId]->calibration_transform1,
9562 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9563
9564 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9565 (void *)gCamCapability[cameraId]->calibration_transform2,
9566 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9567
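    // Keys accepted in capture requests; the basic list is extended with AF regions
    // (when more than one focus mode is supported) and published as
    // ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS.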
9568 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9569 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9570 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9571 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9572 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9573 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9574 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9575 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9576 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9577 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9578 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9579 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9580 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9581 ANDROID_JPEG_GPS_COORDINATES,
9582 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9583 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9584 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9585 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9586 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9587 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9588 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9589 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9590 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9591 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009592#ifndef USE_HAL_3_3
9593 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9594#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009595 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009596 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009597 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9598 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009599 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009600 /* DevCamDebug metadata request_keys_basic */
9601 DEVCAMDEBUG_META_ENABLE,
9602 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009603 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9604 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009605 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009606
9607 size_t request_keys_cnt =
9608 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9609 Vector<int32_t> available_request_keys;
9610 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9611 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9612 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9613 }
9614
9615 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9616 available_request_keys.array(), available_request_keys.size());
9617
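    // Keys that may appear in capture results, including vendor DevCamDebug and Nexus
    // experimental tags; RAW-sensor, face-detect and black-level keys are appended
    // conditionally before publishing ANDROID_REQUEST_AVAILABLE_RESULT_KEYS.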
9618 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9619 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9620 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9621 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9622 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9623 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9624 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9625 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9626 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9627 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9628 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9629 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9630 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9631 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9632 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9633 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9634 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009635 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009636 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9637 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9638 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009639 ANDROID_STATISTICS_FACE_SCORES,
9640#ifndef USE_HAL_3_3
9641 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9642#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009643 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009644 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009645 // DevCamDebug metadata result_keys_basic
9646 DEVCAMDEBUG_META_ENABLE,
9647 // DevCamDebug metadata result_keys AF
9648 DEVCAMDEBUG_AF_LENS_POSITION,
9649 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9650 DEVCAMDEBUG_AF_TOF_DISTANCE,
9651 DEVCAMDEBUG_AF_LUMA,
9652 DEVCAMDEBUG_AF_HAF_STATE,
9653 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9654 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9655 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9656 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9657 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9658 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9659 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9660 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9661 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9662 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9663 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9664 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9665 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9666 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9667 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9668 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9669 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9670 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9671 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9672 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9673 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9674 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9675 // DevCamDebug metadata result_keys AEC
9676 DEVCAMDEBUG_AEC_TARGET_LUMA,
9677 DEVCAMDEBUG_AEC_COMP_LUMA,
9678 DEVCAMDEBUG_AEC_AVG_LUMA,
9679 DEVCAMDEBUG_AEC_CUR_LUMA,
9680 DEVCAMDEBUG_AEC_LINECOUNT,
9681 DEVCAMDEBUG_AEC_REAL_GAIN,
9682 DEVCAMDEBUG_AEC_EXP_INDEX,
9683 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009684 // DevCamDebug metadata result_keys zzHDR
9685 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9686 DEVCAMDEBUG_AEC_L_LINECOUNT,
9687 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9688 DEVCAMDEBUG_AEC_S_LINECOUNT,
9689 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9690 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9691 // DevCamDebug metadata result_keys ADRC
9692 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9693 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9694 DEVCAMDEBUG_AEC_GTM_RATIO,
9695 DEVCAMDEBUG_AEC_LTM_RATIO,
9696 DEVCAMDEBUG_AEC_LA_RATIO,
9697 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009698 // DevCamDebug metadata result_keys AWB
9699 DEVCAMDEBUG_AWB_R_GAIN,
9700 DEVCAMDEBUG_AWB_G_GAIN,
9701 DEVCAMDEBUG_AWB_B_GAIN,
9702 DEVCAMDEBUG_AWB_CCT,
9703 DEVCAMDEBUG_AWB_DECISION,
9704 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009705 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9706 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9707 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009708 };
9709
Thierry Strudel3d639192016-09-09 11:52:26 -07009710 size_t result_keys_cnt =
9711 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9712
9713 Vector<int32_t> available_result_keys;
9714 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9715 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9716 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9717 }
9718 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9719 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9720 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9721 }
9722 if (supportedFaceDetectMode == 1) {
9723 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9724 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9725 } else if ((supportedFaceDetectMode == 2) ||
9726 (supportedFaceDetectMode == 3)) {
9727 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9728 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9729 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009730#ifndef USE_HAL_3_3
9731 if (hasBlackRegions) {
9732 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9733 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9734 }
9735#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009736 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9737 available_result_keys.array(), available_result_keys.size());
9738
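    // Static characteristics keys advertised to the framework; optical black regions
    // are appended only when the sensor reports them (and only for HAL versions
    // newer than 3.3).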
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009739 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009740 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9741 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9742 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9743 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9744 ANDROID_SCALER_CROPPING_TYPE,
9745 ANDROID_SYNC_MAX_LATENCY,
9746 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9747 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9748 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9749 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9750 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9751 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9752 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9753 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9754 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9755 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9756 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9757 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9758 ANDROID_LENS_FACING,
9759 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9760 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9761 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9762 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9763 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9764 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9765 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9766 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9767 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9768 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9769 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9770 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9771 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9772 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9773 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9774 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9775 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9776 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9777 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9778 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009779 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009780 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9781 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9782 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9783 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9784 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9785 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9786 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9787 ANDROID_CONTROL_AVAILABLE_MODES,
9788 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9789 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9790 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9791 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009792 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009793#ifdef SUPPORT_DEPTH_DATA
9794 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9795 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9796 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9797 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9798 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9799#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009800#ifndef USE_HAL_3_3
9801 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9802 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9803#endif
9804 };
9805
9806 Vector<int32_t> available_characteristics_keys;
9807 available_characteristics_keys.appendArray(characteristics_keys_basic,
9808 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9809#ifndef USE_HAL_3_3
9810 if (hasBlackRegions) {
9811 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9812 }
9813#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009814 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009815 available_characteristics_keys.array(),
9816 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009817
9818 /*available stall durations depend on the hw + sw and will be different for different devices */
9819 /*have to add for raw after implementation*/
9820 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9821 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9822
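    // Stall durations are published as (format, width, height, duration) tuples:
    // BLOB (JPEG) entries come from the picture size table, RAW16 entries from the
    // supported raw dimensions.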
9823 Vector<int64_t> available_stall_durations;
9824 for (uint32_t j = 0; j < stall_formats_count; j++) {
9825 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9826 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9827 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9828 available_stall_durations.add(stall_formats[j]);
9829 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9830 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9831 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9832 }
9833 } else {
9834 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9835 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9836 available_stall_durations.add(stall_formats[j]);
9837 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9838 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9839 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9840 }
9841 }
9842 }
9843 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9844 available_stall_durations.array(),
9845 available_stall_durations.size());
9846
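    // Select the opaque RAW readout format from the sensor white level (8/10/12-bit)
    // and the backend packing (QCOM legacy vs. MIPI), then advertise it along with
    // per-dimension strides below.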
9847 //QCAMERA3_OPAQUE_RAW
9848 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9849 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9850 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9851 case LEGACY_RAW:
9852 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9853 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9854 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9855 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9856 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9857 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9858 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9859 break;
9860 case MIPI_RAW:
9861 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9862 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9863 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9864 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9865 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9866 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9867 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9868 break;
9869 default:
9870 LOGE("unknown opaque_raw_format %d",
9871 gCamCapability[cameraId]->opaque_raw_fmt);
9872 break;
9873 }
9874 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9875
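    // For each supported raw dimension publish a (width, height, stride) triple; the
    // stride is taken from the plane layout computed for the chosen raw format.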
9876 Vector<int32_t> strides;
9877 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9878 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9879 cam_stream_buf_plane_info_t buf_planes;
9880 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9881 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9882 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9883 &gCamCapability[cameraId]->padding_info, &buf_planes);
9884 strides.add(buf_planes.plane_info.mp[0].stride);
9885 }
9886 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9887 strides.size());
9888
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009889 //TBD: remove the following line once backend advertises zzHDR in feature mask
9890 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009891 //Video HDR default
9892 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9893 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009894 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009895 int32_t vhdr_mode[] = {
9896 QCAMERA3_VIDEO_HDR_MODE_OFF,
9897 QCAMERA3_VIDEO_HDR_MODE_ON};
9898
9899 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9900 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9901 vhdr_mode, vhdr_mode_count);
9902 }
9903
Thierry Strudel3d639192016-09-09 11:52:26 -07009904 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9905 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9906 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9907
9908 uint8_t isMonoOnly =
9909 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9910 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9911 &isMonoOnly, 1);
9912
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009913#ifndef USE_HAL_3_3
9914 Vector<int32_t> opaque_size;
9915 for (size_t j = 0; j < scalar_formats_count; j++) {
9916 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9917 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9918 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9919 cam_stream_buf_plane_info_t buf_planes;
9920
9921 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9922 &gCamCapability[cameraId]->padding_info, &buf_planes);
9923
9924 if (rc == 0) {
9925 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9926 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9927 opaque_size.add(buf_planes.plane_info.frame_len);
9928                    } else {
9929 LOGE("raw frame calculation failed!");
9930 }
9931 }
9932 }
9933 }
9934
9935 if ((opaque_size.size() > 0) &&
9936 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9937 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9938 else
9939        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
9940#endif
9941
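    // The vendor tag blocks below share one pattern: translate each backend enum
    // (IR, instant AEC, binning correction, exposure metering, ISO) to its framework
    // value and publish only the modes that map successfully.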
Thierry Strudel04e026f2016-10-10 11:27:36 -07009942 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9943 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9944 size = 0;
9945 count = CAM_IR_MODE_MAX;
9946 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9947 for (size_t i = 0; i < count; i++) {
9948 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9949 gCamCapability[cameraId]->supported_ir_modes[i]);
9950 if (NAME_NOT_FOUND != val) {
9951 avail_ir_modes[size] = (int32_t)val;
9952 size++;
9953 }
9954 }
9955 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9956 avail_ir_modes, size);
9957 }
9958
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009959 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9960 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9961 size = 0;
9962 count = CAM_AEC_CONVERGENCE_MAX;
9963 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9964 for (size_t i = 0; i < count; i++) {
9965 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9966 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9967 if (NAME_NOT_FOUND != val) {
9968 available_instant_aec_modes[size] = (int32_t)val;
9969 size++;
9970 }
9971 }
9972 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9973 available_instant_aec_modes, size);
9974 }
9975
Thierry Strudel54dc9782017-02-15 12:12:10 -08009976 int32_t sharpness_range[] = {
9977 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9978 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9979 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9980
9981 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9982 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9983 size = 0;
9984 count = CAM_BINNING_CORRECTION_MODE_MAX;
9985 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9986 for (size_t i = 0; i < count; i++) {
9987 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9988 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9989 gCamCapability[cameraId]->supported_binning_modes[i]);
9990 if (NAME_NOT_FOUND != val) {
9991 avail_binning_modes[size] = (int32_t)val;
9992 size++;
9993 }
9994 }
9995 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9996 avail_binning_modes, size);
9997 }
9998
9999 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10000 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10001 size = 0;
10002 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10003 for (size_t i = 0; i < count; i++) {
10004 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10005 gCamCapability[cameraId]->supported_aec_modes[i]);
10006 if (NAME_NOT_FOUND != val)
10007 available_aec_modes[size++] = val;
10008 }
10009 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10010 available_aec_modes, size);
10011 }
10012
10013 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10014 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10015 size = 0;
10016 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10017 for (size_t i = 0; i < count; i++) {
10018 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10019 gCamCapability[cameraId]->supported_iso_modes[i]);
10020 if (NAME_NOT_FOUND != val)
10021 available_iso_modes[size++] = val;
10022 }
10023 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10024 available_iso_modes, size);
10025 }
10026
10027 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10028    for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10029 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10030 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10031 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10032
10033 int32_t available_saturation_range[4];
10034 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10035 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10036 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10037 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10038 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10039 available_saturation_range, 4);
10040
10041 uint8_t is_hdr_values[2];
10042 is_hdr_values[0] = 0;
10043 is_hdr_values[1] = 1;
10044 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10045 is_hdr_values, 2);
10046
10047 float is_hdr_confidence_range[2];
10048 is_hdr_confidence_range[0] = 0.0;
10049 is_hdr_confidence_range[1] = 1.0;
10050 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10051 is_hdr_confidence_range, 2);
10052
Emilian Peev0a972ef2017-03-16 10:25:53 +000010053 size_t eepromLength = strnlen(
10054 reinterpret_cast<const char *>(
10055 gCamCapability[cameraId]->eeprom_version_info),
10056 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10057 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010058 char easelInfo[] = ",E:N";
10059 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10060 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10061 eepromLength += sizeof(easelInfo);
10062 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10063 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010064 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10065 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10066 }
10067
Thierry Strudel3d639192016-09-09 11:52:26 -070010068 gStaticMetadata[cameraId] = staticInfo.release();
10069 return rc;
10070}
10071
10072/*===========================================================================
10073 * FUNCTION : makeTable
10074 *
10075 * DESCRIPTION: make a table of sizes
10076 *
10077 * PARAMETERS :
10078 *
10079 *
10080 *==========================================================================*/
10081void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10082 size_t max_size, int32_t *sizeTable)
10083{
10084 size_t j = 0;
10085 if (size > max_size) {
10086 size = max_size;
10087 }
10088 for (size_t i = 0; i < size; i++) {
10089 sizeTable[j] = dimTable[i].width;
10090 sizeTable[j+1] = dimTable[i].height;
10091 j+=2;
10092 }
10093}
10094
10095/*===========================================================================
10096 * FUNCTION : makeFPSTable
10097 *
10098 * DESCRIPTION: make a table of fps ranges
10099 *
10100 * PARAMETERS :
10101 *
10102 *==========================================================================*/
10103void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10104 size_t max_size, int32_t *fpsRangesTable)
10105{
10106 size_t j = 0;
10107 if (size > max_size) {
10108 size = max_size;
10109 }
10110 for (size_t i = 0; i < size; i++) {
10111 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10112 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10113 j+=2;
10114 }
10115}
10116
10117/*===========================================================================
10118 * FUNCTION : makeOverridesList
10119 *
10120 * DESCRIPTION: make a list of scene mode overrides
10121 *
10122 * PARAMETERS :
10123 *
10124 *
10125 *==========================================================================*/
10126void QCamera3HardwareInterface::makeOverridesList(
10127 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10128 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10129{
10130    /* The daemon will give a list of overrides for all scene modes.
10131    However, we should send the framework only the overrides for the scene
10132    modes it supports. */
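    // Each override entry occupies three consecutive bytes: the AE mode override,
    // the AWB mode override and the AF mode override for one supported scene mode.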
10133 size_t j = 0;
10134 if (size > max_size) {
10135 size = max_size;
10136 }
10137 size_t focus_count = CAM_FOCUS_MODE_MAX;
10138 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10139 focus_count);
10140 for (size_t i = 0; i < size; i++) {
10141 bool supt = false;
10142 size_t index = supported_indexes[i];
10143 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10144 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10145 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10146 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10147 overridesTable[index].awb_mode);
10148 if (NAME_NOT_FOUND != val) {
10149 overridesList[j+1] = (uint8_t)val;
10150 }
10151 uint8_t focus_override = overridesTable[index].af_mode;
10152 for (size_t k = 0; k < focus_count; k++) {
10153 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10154 supt = true;
10155 break;
10156 }
10157 }
10158 if (supt) {
10159 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10160 focus_override);
10161 if (NAME_NOT_FOUND != val) {
10162 overridesList[j+2] = (uint8_t)val;
10163 }
10164 } else {
10165 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10166 }
10167 j+=3;
10168 }
10169}
10170
10171/*===========================================================================
10172 * FUNCTION : filterJpegSizes
10173 *
10174 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that the
10175 * active array can be downscaled to within the maximum downscale factor
10176 *
10177 * PARAMETERS :
10178 *
10179 * RETURN : length of jpegSizes array
10180 *==========================================================================*/
10181
10182size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10183 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10184 uint8_t downscale_factor)
10185{
10186 if (0 == downscale_factor) {
10187 downscale_factor = 1;
10188 }
10189
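    // A processed size qualifies as a JPEG size only if both of its dimensions are at
    // least the active array size divided by the maximum downscale factor.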
10190 int32_t min_width = active_array_size.width / downscale_factor;
10191 int32_t min_height = active_array_size.height / downscale_factor;
10192 size_t jpegSizesCnt = 0;
10193 if (processedSizesCnt > maxCount) {
10194 processedSizesCnt = maxCount;
10195 }
10196 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10197 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10198 jpegSizes[jpegSizesCnt] = processedSizes[i];
10199 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10200 jpegSizesCnt += 2;
10201 }
10202 }
10203 return jpegSizesCnt;
10204}
10205
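// The two helpers below compute the per-sensitivity coefficients of the DNG-style
// noise model N(x) = sqrt(S * x + O) reported via ANDROID_SENSOR_NOISE_PROFILE:
// S grows linearly with sensitivity, while O grows with the square of sensitivity
// and of any digital gain applied beyond the analog range.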
10206/*===========================================================================
10207 * FUNCTION : computeNoiseModelEntryS
10208 *
10209 * DESCRIPTION: function to map a given sensitivity to the S noise
10210 * model parameters in the DNG noise model.
10211 *
10212 * PARAMETERS : sens : the sensor sensitivity
10213 *
10214 * RETURN : S (sensor amplification) noise
10215 *
10216 *==========================================================================*/
10217double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10218 double s = gCamCapability[mCameraId]->gradient_S * sens +
10219 gCamCapability[mCameraId]->offset_S;
10220 return ((s < 0.0) ? 0.0 : s);
10221}
10222
10223/*===========================================================================
10224 * FUNCTION : computeNoiseModelEntryO
10225 *
10226 * DESCRIPTION: function to map a given sensitivity to the O noise
10227 * model parameters in the DNG noise model.
10228 *
10229 * PARAMETERS : sens : the sensor sensitivity
10230 *
10231 * RETURN : O (sensor readout) noise
10232 *
10233 *==========================================================================*/
10234double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10235 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10236 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10237 1.0 : (1.0 * sens / max_analog_sens);
10238 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10239 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10240 return ((o < 0.0) ? 0.0 : o);
10241}
10242
10243/*===========================================================================
10244 * FUNCTION : getSensorSensitivity
10245 *
10246 * DESCRIPTION: convert iso_mode to an integer value
10247 *
10248 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10249 *
10250 * RETURN : sensitivity supported by sensor
10251 *
10252 *==========================================================================*/
10253int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10254{
10255 int32_t sensitivity;
10256
10257 switch (iso_mode) {
10258 case CAM_ISO_MODE_100:
10259 sensitivity = 100;
10260 break;
10261 case CAM_ISO_MODE_200:
10262 sensitivity = 200;
10263 break;
10264 case CAM_ISO_MODE_400:
10265 sensitivity = 400;
10266 break;
10267 case CAM_ISO_MODE_800:
10268 sensitivity = 800;
10269 break;
10270 case CAM_ISO_MODE_1600:
10271 sensitivity = 1600;
10272 break;
10273 default:
10274 sensitivity = -1;
10275 break;
10276 }
10277 return sensitivity;
10278}
10279
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010280int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10281 if (gHdrPlusClient != nullptr) {
10282 return OK;
10283 }
10284
10285 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10286 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010287 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10288 // to connect to Easel.
10289 bool doNotpowerOnEasel =
10290 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10291
10292 if (doNotpowerOnEasel) {
10293 gHdrPlusClient = nullptr;
10294 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10295 return OK;
10296 }
10297
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010298 // If Easel is present, power on Easel and suspend it immediately.
10299 status_t res = gHdrPlusClient->powerOnEasel();
10300 if (res != OK) {
10301            ALOGE("%s: Powering on Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10302 gHdrPlusClient = nullptr;
10303 return res;
10304 }
10305
10306 res = gHdrPlusClient->suspendEasel();
10307 if (res != OK) {
10308 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10309 }
10310
10311 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10312 } else {
10313 // Destroy HDR+ client if Easel isn't present.
10314 gHdrPlusClient = nullptr;
10315 }
10316
10317 return OK;
10318}
10319
Thierry Strudel3d639192016-09-09 11:52:26 -070010320/*===========================================================================
10321 * FUNCTION : getCamInfo
10322 *
10323 * DESCRIPTION: query camera capabilities
10324 *
10325 * PARAMETERS :
10326 * @cameraId : camera Id
10327 * @info : camera info struct to be filled in with camera capabilities
10328 *
10329 * RETURN : int type of status
10330 * NO_ERROR -- success
10331 * non-zero failure code
10332 *==========================================================================*/
10333int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10334 struct camera_info *info)
10335{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010336 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010337 int rc = 0;
10338
10339 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010340
10341 rc = initHdrPlusClientLocked();
10342 if (rc != OK) {
10343 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10344 pthread_mutex_unlock(&gCamLock);
10345 return rc;
10346 }
10347
Thierry Strudel3d639192016-09-09 11:52:26 -070010348 if (NULL == gCamCapability[cameraId]) {
10349 rc = initCapabilities(cameraId);
10350 if (rc < 0) {
10351 pthread_mutex_unlock(&gCamLock);
10352 return rc;
10353 }
10354 }
10355
10356 if (NULL == gStaticMetadata[cameraId]) {
10357 rc = initStaticMetadata(cameraId);
10358 if (rc < 0) {
10359 pthread_mutex_unlock(&gCamLock);
10360 return rc;
10361 }
10362 }
10363
10364 switch(gCamCapability[cameraId]->position) {
10365 case CAM_POSITION_BACK:
10366 case CAM_POSITION_BACK_AUX:
10367 info->facing = CAMERA_FACING_BACK;
10368 break;
10369
10370 case CAM_POSITION_FRONT:
10371 case CAM_POSITION_FRONT_AUX:
10372 info->facing = CAMERA_FACING_FRONT;
10373 break;
10374
10375 default:
10376 LOGE("Unknown position type %d for camera id:%d",
10377 gCamCapability[cameraId]->position, cameraId);
10378 rc = -1;
10379 break;
10380 }
10381
10382
10383 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010384#ifndef USE_HAL_3_3
10385 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10386#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010387 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010388#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010389 info->static_camera_characteristics = gStaticMetadata[cameraId];
10390
10391 //For now assume both cameras can operate independently.
10392 info->conflicting_devices = NULL;
10393 info->conflicting_devices_length = 0;
10394
10395 //resource cost is 100 * MIN(1.0, m/M),
10396 //where m is throughput requirement with maximum stream configuration
10397 //and M is CPP maximum throughput.
10398 float max_fps = 0.0;
10399 for (uint32_t i = 0;
10400 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10401 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10402 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10403 }
10404 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10405 gCamCapability[cameraId]->active_array_size.width *
10406 gCamCapability[cameraId]->active_array_size.height * max_fps /
10407 gCamCapability[cameraId]->max_pixel_bandwidth;
10408 info->resource_cost = 100 * MIN(1.0, ratio);
10409 LOGI("camera %d resource cost is %d", cameraId,
10410 info->resource_cost);
10411
10412 pthread_mutex_unlock(&gCamLock);
10413 return rc;
10414}
10415
10416/*===========================================================================
10417 * FUNCTION : translateCapabilityToMetadata
10418 *
10419 * DESCRIPTION: translate the capability into camera_metadata_t
10420 *
10421 * PARAMETERS : type of the request
10422 *
10423 *
10424 * RETURN : success: camera_metadata_t*
10425 * failure: NULL
10426 *
10427 *==========================================================================*/
10428camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10429{
10430 if (mDefaultMetadata[type] != NULL) {
10431 return mDefaultMetadata[type];
10432 }
10433 //first time we are handling this request
10434 //fill up the metadata structure using the wrapper class
10435 CameraMetadata settings;
10436 //translate from cam_capability_t to camera_metadata_tag_t
10437 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10438 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10439 int32_t defaultRequestID = 0;
10440 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10441
10442 /* OIS disable */
10443 char ois_prop[PROPERTY_VALUE_MAX];
10444 memset(ois_prop, 0, sizeof(ois_prop));
10445 property_get("persist.camera.ois.disable", ois_prop, "0");
10446 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10447
10448 /* Force video to use OIS */
10449 char videoOisProp[PROPERTY_VALUE_MAX];
10450 memset(videoOisProp, 0, sizeof(videoOisProp));
10451 property_get("persist.camera.ois.video", videoOisProp, "1");
10452 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010453
10454 // Hybrid AE enable/disable
10455 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10456 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10457 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10458 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10459
Thierry Strudel3d639192016-09-09 11:52:26 -070010460 uint8_t controlIntent = 0;
10461 uint8_t focusMode;
10462 uint8_t vsMode;
10463 uint8_t optStabMode;
10464 uint8_t cacMode;
10465 uint8_t edge_mode;
10466 uint8_t noise_red_mode;
10467 uint8_t tonemap_mode;
10468 bool highQualityModeEntryAvailable = FALSE;
10469 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010470 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010471 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10472 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010473 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010474
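    // Each CAMERA3_TEMPLATE_* case below selects its own defaults for capture intent,
    // AF mode, OIS, CAC, edge, noise reduction and tonemap mode; the video templates
    // additionally force OIS on when persist.camera.ois.video is non-zero.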
Thierry Strudel3d639192016-09-09 11:52:26 -070010475 switch (type) {
10476 case CAMERA3_TEMPLATE_PREVIEW:
10477 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10478 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10479 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10480 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10481 edge_mode = ANDROID_EDGE_MODE_FAST;
10482 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10483 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10484 break;
10485 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10486 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10487 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10488 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10489 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10490 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10491 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10492 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10493 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10494 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10495 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10496 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10497 highQualityModeEntryAvailable = TRUE;
10498 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10499 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10500 fastModeEntryAvailable = TRUE;
10501 }
10502 }
10503 if (highQualityModeEntryAvailable) {
10504 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10505 } else if (fastModeEntryAvailable) {
10506 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10507 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010508 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10509 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10510 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010511 break;
10512 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10513 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10514 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10515 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010516 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10517 edge_mode = ANDROID_EDGE_MODE_FAST;
10518 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10519 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10520 if (forceVideoOis)
10521 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10522 break;
10523 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10524 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10525 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10526 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010527 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10528 edge_mode = ANDROID_EDGE_MODE_FAST;
10529 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10530 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10531 if (forceVideoOis)
10532 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10533 break;
10534 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10535 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10536 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10537 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10538 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10539 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10540 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10541 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10542 break;
10543 case CAMERA3_TEMPLATE_MANUAL:
10544 edge_mode = ANDROID_EDGE_MODE_FAST;
10545 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10546 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10547 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10548 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10549 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10550 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10551 break;
10552 default:
10553 edge_mode = ANDROID_EDGE_MODE_FAST;
10554 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10555 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10556 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10557 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10558 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10559 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10560 break;
10561 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010562 // Set CAC to OFF if underlying device doesn't support
10563 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10564 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10565 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010566 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10567 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10568 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10569 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10570 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10571 }
10572 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010573 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010574
10575 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10576 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10577 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10578 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10579 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10580 || ois_disable)
10581 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10582 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010583 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010584
10585 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10586 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10587
10588 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10589 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10590
10591 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10592 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10593
10594 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10595 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10596
10597 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10598 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10599
10600 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10601 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10602
10603 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10604 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10605
10606 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10607 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10608
10609 /*flash*/
10610 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10611 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10612
10613 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10614 settings.update(ANDROID_FLASH_FIRING_POWER,
10615 &flashFiringLevel, 1);
10616
10617 /* lens */
10618 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10619 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10620
10621 if (gCamCapability[mCameraId]->filter_densities_count) {
10622 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10623 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10624 gCamCapability[mCameraId]->filter_densities_count);
10625 }
10626
10627 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10628 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10629
Thierry Strudel3d639192016-09-09 11:52:26 -070010630 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10631 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10632
10633 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10634 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10635
10636 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10637 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10638
10639 /* face detection (default to OFF) */
10640 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10641 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10642
Thierry Strudel54dc9782017-02-15 12:12:10 -080010643 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10644 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010645
10646 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10647 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10648
10649 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10650 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10651
Thierry Strudel3d639192016-09-09 11:52:26 -070010652
10653 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10654 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10655
10656 /* Exposure time (default to the minimum supported exposure time) */
10657 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10658 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10659
10660 /* frame duration */
10661 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10662 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10663
10664 /* sensitivity */
10665 static const int32_t default_sensitivity = 100;
10666 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010667#ifndef USE_HAL_3_3
10668 static const int32_t default_isp_sensitivity =
10669 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10670 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10671#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010672
10673 /*edge mode*/
10674 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10675
10676 /*noise reduction mode*/
10677 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10678
10679 /*color correction mode*/
10680 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10681 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10682
10683 /*tonemap mode*/
10684 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10685
10686 int32_t scaler_crop_region[4];
10687 scaler_crop_region[0] = 0;
10688 scaler_crop_region[1] = 0;
10689 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10690 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10691 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10692
10693 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10694 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10695
10696 /*focus distance*/
10697 float focus_distance = 0.0;
10698 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10699
10700 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010701 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010702 float max_range = 0.0;
10703 float max_fixed_fps = 0.0;
10704 int32_t fps_range[2] = {0, 0};
10705 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10706 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010707 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10708 TEMPLATE_MAX_PREVIEW_FPS) {
10709 continue;
10710 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010711 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10712 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10713 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10714 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10715 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10716 if (range > max_range) {
10717 fps_range[0] =
10718 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10719 fps_range[1] =
10720 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10721 max_range = range;
10722 }
10723 } else {
10724 if (range < 0.01 && max_fixed_fps <
10725 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10726 fps_range[0] =
10727 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10728 fps_range[1] =
10729 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10730 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10731 }
10732 }
10733 }
10734 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
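    /* Worked example (hypothetical fps_ranges_tbl entries, for illustration only):
     * given {[15,30], [30,30], [60,60], [120,120]} and TEMPLATE_MAX_PREVIEW_FPS of 30
     * (per the comment above), the [60,60] and [120,120] entries are skipped.
     * Preview/still/ZSL templates then pick the widest remaining range, [15,30];
     * the other (video/record) templates pick the highest fixed range, [30,30]. */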
10735
10736 /*precapture trigger*/
10737 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10738 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10739
10740 /*af trigger*/
10741 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10742 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10743
10744 /* ae & af regions */
10745 int32_t active_region[] = {
10746 gCamCapability[mCameraId]->active_array_size.left,
10747 gCamCapability[mCameraId]->active_array_size.top,
10748 gCamCapability[mCameraId]->active_array_size.left +
10749 gCamCapability[mCameraId]->active_array_size.width,
10750 gCamCapability[mCameraId]->active_array_size.top +
10751 gCamCapability[mCameraId]->active_array_size.height,
10752 0};
10753 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10754 sizeof(active_region) / sizeof(active_region[0]));
10755 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10756 sizeof(active_region) / sizeof(active_region[0]));
10757
10758 /* black level lock */
10759 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10760 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10761
Thierry Strudel3d639192016-09-09 11:52:26 -070010762 //special defaults for manual template
10763 if (type == CAMERA3_TEMPLATE_MANUAL) {
10764 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10765 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10766
10767 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10768 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10769
10770 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10771 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10772
10773 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10774 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10775
10776 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10777 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10778
10779 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10780 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10781 }
10782
10783
10784 /* TNR
10785 * This is where we decide which templates will have TNR enabled.
10786 * TNR is enabled if either the preview or the video stream requires it.
10787 * This is not to be confused with per-stream linking; that decision is
10788 * still made on a per-session basis and is handled as part of stream configuration.
10789 */
10790 uint8_t tnr_enable = 0;
10791
10792 if (m_bTnrPreview || m_bTnrVideo) {
10793
10794 switch (type) {
10795 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10796 tnr_enable = 1;
10797 break;
10798
10799 default:
10800 tnr_enable = 0;
10801 break;
10802 }
10803
10804 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10805 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10806 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10807
10808 LOGD("TNR:%d with process plate %d for template:%d",
10809 tnr_enable, tnr_process_type, type);
10810 }
10811
10812 //Update Link tags to default
10813 int32_t sync_type = CAM_TYPE_STANDALONE;
10814 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10815
10816 int32_t is_main = 0; //this doesn't matter as the app should overwrite it
10817 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10818
10819 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10820
10821 /* CDS default */
10822 char prop[PROPERTY_VALUE_MAX];
10823 memset(prop, 0, sizeof(prop));
10824 property_get("persist.camera.CDS", prop, "Auto");
10825 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10826 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10827 if (CAM_CDS_MODE_MAX == cds_mode) {
10828 cds_mode = CAM_CDS_MODE_AUTO;
10829 }
10830
10831 /* Disable CDS in templates that have TNR enabled */
10832 if (tnr_enable)
10833 cds_mode = CAM_CDS_MODE_OFF;
10834
10835 int32_t mode = cds_mode;
10836 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010837
Thierry Strudel269c81a2016-10-12 12:13:59 -070010838 /* Manual AEC convergence speed is disabled by default */
10839 float default_aec_speed = 0;
10840 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10841
10842 /* Manual AWB convergence speed is disabled by default */
10843 float default_awb_speed = 0;
10844 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10845
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010846 // Set instant AEC to normal convergence by default
10847 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10848 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10849
Shuzhen Wang19463d72016-03-08 11:09:52 -080010850 /* hybrid ae */
10851 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10852
Thierry Strudel3d639192016-09-09 11:52:26 -070010853 mDefaultMetadata[type] = settings.release();
10854
10855 return mDefaultMetadata[type];
10856}
10857
10858/*===========================================================================
10859 * FUNCTION : setFrameParameters
10860 *
10861 * DESCRIPTION: set parameters per frame as requested in the metadata from
10862 * framework
10863 *
10864 * PARAMETERS :
10865 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010866 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010867 * @blob_request: Whether this request is a blob request or not
10868 *
10869 * RETURN : success: NO_ERROR
10870 * failure:
10871 *==========================================================================*/
10872int QCamera3HardwareInterface::setFrameParameters(
10873 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010874 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010875 int blob_request,
10876 uint32_t snapshotStreamId)
10877{
10878 /*translate from camera_metadata_t type to parm_type_t*/
10879 int rc = 0;
10880 int32_t hal_version = CAM_HAL_V3;
10881
10882 clear_metadata_buffer(mParameters);
10883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10884 LOGE("Failed to set hal version in the parameters");
10885 return BAD_VALUE;
10886 }
10887
10888 /*we need to update the frame number in the parameters*/
10889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10890 request->frame_number)) {
10891 LOGE("Failed to set the frame number in the parameters");
10892 return BAD_VALUE;
10893 }
10894
10895 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010897 LOGE("Failed to set stream type mask in the parameters");
10898 return BAD_VALUE;
10899 }
10900
10901 if (mUpdateDebugLevel) {
10902 uint32_t dummyDebugLevel = 0;
10903 /* The value of dummyDebugLevel is irrelevant. On
10904 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
10905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10906 dummyDebugLevel)) {
10907 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10908 return BAD_VALUE;
10909 }
10910 mUpdateDebugLevel = false;
10911 }
10912
10913 if(request->settings != NULL){
10914 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10915 if (blob_request)
10916 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10917 }
10918
10919 return rc;
10920}
10921
10922/*===========================================================================
10923 * FUNCTION : setReprocParameters
10924 *
10925 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10926 * return it.
10927 *
10928 * PARAMETERS :
10929 * @request : request that needs to be serviced
10930 *
10931 * RETURN : success: NO_ERROR
10932 * failure:
10933 *==========================================================================*/
10934int32_t QCamera3HardwareInterface::setReprocParameters(
10935 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10936 uint32_t snapshotStreamId)
10937{
10938 /*translate from camera_metadata_t type to parm_type_t*/
10939 int rc = 0;
10940
10941 if (NULL == request->settings){
10942 LOGE("Reprocess settings cannot be NULL");
10943 return BAD_VALUE;
10944 }
10945
10946 if (NULL == reprocParam) {
10947 LOGE("Invalid reprocessing metadata buffer");
10948 return BAD_VALUE;
10949 }
10950 clear_metadata_buffer(reprocParam);
10951
10952 /*we need to update the frame number in the parameters*/
10953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10954 request->frame_number)) {
10955 LOGE("Failed to set the frame number in the parameters");
10956 return BAD_VALUE;
10957 }
10958
10959 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10960 if (rc < 0) {
10961 LOGE("Failed to translate reproc request");
10962 return rc;
10963 }
10964
10965 CameraMetadata frame_settings;
10966 frame_settings = request->settings;
10967 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10968 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10969 int32_t *crop_count =
10970 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10971 int32_t *crop_data =
10972 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10973 int32_t *roi_map =
10974 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10975 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10976 cam_crop_data_t crop_meta;
10977 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10978 crop_meta.num_of_streams = 1;
10979 crop_meta.crop_info[0].crop.left = crop_data[0];
10980 crop_meta.crop_info[0].crop.top = crop_data[1];
10981 crop_meta.crop_info[0].crop.width = crop_data[2];
10982 crop_meta.crop_info[0].crop.height = crop_data[3];
10983
10984 crop_meta.crop_info[0].roi_map.left =
10985 roi_map[0];
10986 crop_meta.crop_info[0].roi_map.top =
10987 roi_map[1];
10988 crop_meta.crop_info[0].roi_map.width =
10989 roi_map[2];
10990 crop_meta.crop_info[0].roi_map.height =
10991 roi_map[3];
10992
10993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10994 rc = BAD_VALUE;
10995 }
10996 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10997 request->input_buffer->stream,
10998 crop_meta.crop_info[0].crop.left,
10999 crop_meta.crop_info[0].crop.top,
11000 crop_meta.crop_info[0].crop.width,
11001 crop_meta.crop_info[0].crop.height);
11002 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11003 request->input_buffer->stream,
11004 crop_meta.crop_info[0].roi_map.left,
11005 crop_meta.crop_info[0].roi_map.top,
11006 crop_meta.crop_info[0].roi_map.width,
11007 crop_meta.crop_info[0].roi_map.height);
11008 } else {
11009 LOGE("Invalid reprocess crop count %d!", *crop_count);
11010 }
11011 } else {
11012 LOGE("No crop data from matching output stream");
11013 }
11014
11015 /* These settings are not needed for regular requests so handle them specially for
11016 reprocess requests; information needed for EXIF tags */
11017 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11018 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11019 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11020 if (NAME_NOT_FOUND != val) {
11021 uint32_t flashMode = (uint32_t)val;
11022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11023 rc = BAD_VALUE;
11024 }
11025 } else {
11026 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11027 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11028 }
11029 } else {
11030 LOGH("No flash mode in reprocess settings");
11031 }
11032
11033 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11034 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11036 rc = BAD_VALUE;
11037 }
11038 } else {
11039 LOGH("No flash state in reprocess settings");
11040 }
11041
11042 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11043 uint8_t *reprocessFlags =
11044 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11045 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11046 *reprocessFlags)) {
11047 rc = BAD_VALUE;
11048 }
11049 }
11050
Thierry Strudel54dc9782017-02-15 12:12:10 -080011051 // Add exif debug data to internal metadata
11052 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11053 mm_jpeg_debug_exif_params_t *debug_params =
11054 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11055 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11056 // AE
11057 if (debug_params->ae_debug_params_valid == TRUE) {
11058 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11059 debug_params->ae_debug_params);
11060 }
11061 // AWB
11062 if (debug_params->awb_debug_params_valid == TRUE) {
11063 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11064 debug_params->awb_debug_params);
11065 }
11066 // AF
11067 if (debug_params->af_debug_params_valid == TRUE) {
11068 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11069 debug_params->af_debug_params);
11070 }
11071 // ASD
11072 if (debug_params->asd_debug_params_valid == TRUE) {
11073 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11074 debug_params->asd_debug_params);
11075 }
11076 // Stats
11077 if (debug_params->stats_debug_params_valid == TRUE) {
11078 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11079 debug_params->stats_debug_params);
11080 }
11081 // BE Stats
11082 if (debug_params->bestats_debug_params_valid == TRUE) {
11083 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11084 debug_params->bestats_debug_params);
11085 }
11086 // BHIST
11087 if (debug_params->bhist_debug_params_valid == TRUE) {
11088 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11089 debug_params->bhist_debug_params);
11090 }
11091 // 3A Tuning
11092 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11093 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11094 debug_params->q3a_tuning_debug_params);
11095 }
11096 }
11097
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011098 // Add metadata which reprocess needs
11099 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11100 cam_reprocess_info_t *repro_info =
11101 (cam_reprocess_info_t *)frame_settings.find
11102 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011103 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011104 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011105 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011106 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011107 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011108 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011109 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011110 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011111 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011112 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011113 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011114 repro_info->pipeline_flip);
11115 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11116 repro_info->af_roi);
11117 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11118 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011119 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11120 CAM_INTF_PARM_ROTATION metadata has already been added in
11121 translateToHalMetadata, and HAL needs to keep this new rotation
11122 metadata. Otherwise, the old rotation info saved in the vendor tag
11123 is used */
11124 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11125 CAM_INTF_PARM_ROTATION, reprocParam) {
11126 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11127 } else {
11128 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011129 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011130 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011131 }
11132
11133 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11134 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11135 roi.width and roi.height will be the final JPEG size.
11136 For now, HAL only checks this for reprocess requests */
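    /* Illustrative app-side sketch (the CameraMetadata variable requestSettings and
     * the numeric values are hypothetical; the tags and the data layout match what
     * is parsed below): crop a 2000x1500 region and downscale it to a 1600x1200 JPEG.
     *
     *     uint8_t enable = 1;
     *     int32_t cropRect[4] = {328, 246, 2000, 1500}; // left, top, width, height
     *     int32_t cropRoi[4]  = {0, 0, 1600, 1200};     // roi[2]/roi[3] = final JPEG size
     *     requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
     *     requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *     requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */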
11137 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11138 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11139 uint8_t *enable =
11140 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11141 if (*enable == TRUE) {
11142 int32_t *crop_data =
11143 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11144 cam_stream_crop_info_t crop_meta;
11145 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11146 crop_meta.stream_id = 0;
11147 crop_meta.crop.left = crop_data[0];
11148 crop_meta.crop.top = crop_data[1];
11149 crop_meta.crop.width = crop_data[2];
11150 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011151 // The JPEG crop roi should match cpp output size
11152 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11153 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11154 crop_meta.roi_map.left = 0;
11155 crop_meta.roi_map.top = 0;
11156 crop_meta.roi_map.width = cpp_crop->crop.width;
11157 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011158 }
11159 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11160 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011161 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011162 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011163 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11164 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011165 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011166 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11167
11168 // Add JPEG scale information
11169 cam_dimension_t scale_dim;
11170 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11171 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11172 int32_t *roi =
11173 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11174 scale_dim.width = roi[2];
11175 scale_dim.height = roi[3];
11176 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11177 scale_dim);
11178 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11179 scale_dim.width, scale_dim.height, mCameraId);
11180 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011181 }
11182 }
11183
11184 return rc;
11185}
11186
11187/*===========================================================================
11188 * FUNCTION : saveRequestSettings
11189 *
11190 * DESCRIPTION: Add any settings that might have changed to the request settings
11191 * and save the settings to be applied on the frame
11192 *
11193 * PARAMETERS :
11194 * @jpegMetadata : the extracted and/or modified jpeg metadata
11195 * @request : request with initial settings
11196 *
11197 * RETURN :
11198 * camera_metadata_t* : pointer to the saved request settings
11199 *==========================================================================*/
11200camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11201 const CameraMetadata &jpegMetadata,
11202 camera3_capture_request_t *request)
11203{
11204 camera_metadata_t *resultMetadata;
11205 CameraMetadata camMetadata;
11206 camMetadata = request->settings;
11207
11208 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11209 int32_t thumbnail_size[2];
11210 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11211 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11212 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11213 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11214 }
11215
11216 if (request->input_buffer != NULL) {
11217 uint8_t reprocessFlags = 1;
11218 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11219 (uint8_t*)&reprocessFlags,
11220 sizeof(reprocessFlags));
11221 }
11222
11223 resultMetadata = camMetadata.release();
11224 return resultMetadata;
11225}
11226
11227/*===========================================================================
11228 * FUNCTION : setHalFpsRange
11229 *
11230 * DESCRIPTION: set FPS range parameter
11231 *
11232 *
11233 * PARAMETERS :
11234 * @settings : Metadata from framework
11235 * @hal_metadata: Metadata buffer
11236 *
11237 *
11238 * RETURN : success: NO_ERROR
11239 * failure:
11240 *==========================================================================*/
11241int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11242 metadata_buffer_t *hal_metadata)
11243{
11244 int32_t rc = NO_ERROR;
11245 cam_fps_range_t fps_range;
11246 fps_range.min_fps = (float)
11247 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11248 fps_range.max_fps = (float)
11249 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11250 fps_range.video_min_fps = fps_range.min_fps;
11251 fps_range.video_max_fps = fps_range.max_fps;
11252
11253 LOGD("aeTargetFpsRange fps: [%f %f]",
11254 fps_range.min_fps, fps_range.max_fps);
11255 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11256 * follows:
11257 * ---------------------------------------------------------------|
11258 * Video stream is absent in configure_streams |
11259 * (Camcorder preview before the first video record |
11260 * ---------------------------------------------------------------|
11261 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11262 * | | | vid_min/max_fps|
11263 * ---------------------------------------------------------------|
11264 * NO | [ 30, 240] | 240 | [240, 240] |
11265 * |-------------|-------------|----------------|
11266 * | [240, 240] | 240 | [240, 240] |
11267 * ---------------------------------------------------------------|
11268 * Video stream is present in configure_streams |
11269 * ---------------------------------------------------------------|
11270 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11271 * | | | vid_min/max_fps|
11272 * ---------------------------------------------------------------|
11273 * NO | [ 30, 240] | 240 | [240, 240] |
11274 * (camcorder prev |-------------|-------------|----------------|
11275 * after video rec | [240, 240] | 240 | [240, 240] |
11276 * is stopped) | | | |
11277 * ---------------------------------------------------------------|
11278 * YES | [ 30, 240] | 240 | [240, 240] |
11279 * |-------------|-------------|----------------|
11280 * | [240, 240] | 240 | [240, 240] |
11281 * ---------------------------------------------------------------|
11282 * When Video stream is absent in configure_streams,
11283 * preview fps = sensor_fps / batchsize
11284 * Eg: for 240fps at batchSize 4, preview = 60fps
11285 * for 120fps at batchSize 4, preview = 30fps
11286 *
11287 * When video stream is present in configure_streams, preview fps is as per
11288 * the ratio of preview buffers to video buffers requested in process
11289 * capture request
11290 */
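    /* Illustrative sketch of the batching math below (assumes PREVIEW_FPS_FOR_HFR
     * is 30, as implied by the examples above): for an aeTargetFpsRange of
     * [30, 120] in constrained high-speed mode the sensor range becomes
     * [120, 120], mHFRVideoFps = 120 and
     *     mBatchSize = 120 / 30 = 4
     * so, with no video stream configured, preview runs at 120 / 4 = 30 fps. */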
11291 mBatchSize = 0;
11292 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11293 fps_range.min_fps = fps_range.video_max_fps;
11294 fps_range.video_min_fps = fps_range.video_max_fps;
11295 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11296 fps_range.max_fps);
11297 if (NAME_NOT_FOUND != val) {
11298 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11300 return BAD_VALUE;
11301 }
11302
11303 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11304 /* If batchmode is currently in progress and the fps changes,
11305 * set the flag to restart the sensor */
11306 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11307 (mHFRVideoFps != fps_range.max_fps)) {
11308 mNeedSensorRestart = true;
11309 }
11310 mHFRVideoFps = fps_range.max_fps;
11311 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11312 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11313 mBatchSize = MAX_HFR_BATCH_SIZE;
11314 }
11315 }
11316 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11317
11318 }
11319 } else {
11320 /* HFR mode is a session parameter in the backend/ISP. It should be reset
11321 * when not in HFR mode */
11322 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11324 return BAD_VALUE;
11325 }
11326 }
11327 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11328 return BAD_VALUE;
11329 }
11330 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11331 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11332 return rc;
11333}
11334
11335/*===========================================================================
11336 * FUNCTION : translateToHalMetadata
11337 *
11338 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11339 *
11340 *
11341 * PARAMETERS :
11342 * @request : request sent from framework
11343 *
11344 *
11345 * RETURN : success: NO_ERROR
11346 * failure:
11347 *==========================================================================*/
11348int QCamera3HardwareInterface::translateToHalMetadata
11349 (const camera3_capture_request_t *request,
11350 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011351 uint32_t snapshotStreamId) {
11352 if (request == nullptr || hal_metadata == nullptr) {
11353 return BAD_VALUE;
11354 }
11355
11356 int64_t minFrameDuration = getMinFrameDuration(request);
11357
11358 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11359 minFrameDuration);
11360}
11361
11362int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11363 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11364 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11365
Thierry Strudel3d639192016-09-09 11:52:26 -070011366 int rc = 0;
11367 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011368 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011369
11370 /* Do not change the order of the following list unless you know what you are
11371 * doing.
11372 * The order is laid out in such a way that parameters in the front of the table
11373 * may be used to override the parameters later in the table. Examples are:
11374 * 1. META_MODE should precede AEC/AWB/AF MODE
11375 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11376 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11377 * 4. Any mode should precede its corresponding settings
11378 */
11379 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11380 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11382 rc = BAD_VALUE;
11383 }
11384 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11385 if (rc != NO_ERROR) {
11386 LOGE("extractSceneMode failed");
11387 }
11388 }
11389
11390 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11391 uint8_t fwk_aeMode =
11392 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11393 uint8_t aeMode;
11394 int32_t redeye;
11395
11396 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11397 aeMode = CAM_AE_MODE_OFF;
11398 } else {
11399 aeMode = CAM_AE_MODE_ON;
11400 }
11401 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11402 redeye = 1;
11403 } else {
11404 redeye = 0;
11405 }
11406
11407 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11408 fwk_aeMode);
11409 if (NAME_NOT_FOUND != val) {
11410 int32_t flashMode = (int32_t)val;
11411 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11412 }
11413
11414 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11415 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11416 rc = BAD_VALUE;
11417 }
11418 }
11419
11420 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11421 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11422 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11423 fwk_whiteLevel);
11424 if (NAME_NOT_FOUND != val) {
11425 uint8_t whiteLevel = (uint8_t)val;
11426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11427 rc = BAD_VALUE;
11428 }
11429 }
11430 }
11431
11432 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11433 uint8_t fwk_cacMode =
11434 frame_settings.find(
11435 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11436 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11437 fwk_cacMode);
11438 if (NAME_NOT_FOUND != val) {
11439 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11440 bool entryAvailable = FALSE;
11441 // Check whether the CAC mode set by the framework is supported by the device
11442 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11443 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11444 entryAvailable = TRUE;
11445 break;
11446 }
11447 }
11448 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11449 // If the entry is not found, set the device-supported mode instead of the framework mode, i.e.,
11450 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11451 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11452 if (entryAvailable == FALSE) {
11453 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11454 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11455 } else {
11456 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11457 // High is not supported, so set FAST since the spec says the underlying
11458 // device implementation can be the same for both modes.
11459 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11460 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11461 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
11462 // in order to avoid the fps drop due to high quality
11463 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11464 } else {
11465 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11466 }
11467 }
11468 }
11469 LOGD("Final cacMode is %d", cacMode);
11470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11471 rc = BAD_VALUE;
11472 }
11473 } else {
11474 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11475 }
11476 }
11477
Thierry Strudel2896d122017-02-23 19:18:03 -080011478 char af_value[PROPERTY_VALUE_MAX];
11479 property_get("persist.camera.af.infinity", af_value, "0");
11480
Jason Lee84ae9972017-02-24 13:24:24 -080011481 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011482 if (atoi(af_value) == 0) {
11483 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011484 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011485 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11486 fwk_focusMode);
11487 if (NAME_NOT_FOUND != val) {
11488 uint8_t focusMode = (uint8_t)val;
11489 LOGD("set focus mode %d", focusMode);
11490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11491 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11492 rc = BAD_VALUE;
11493 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011494 }
11495 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011496 } else {
11497 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11498 LOGE("Focus forced to infinity %d", focusMode);
11499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11500 rc = BAD_VALUE;
11501 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011502 }
11503
Jason Lee84ae9972017-02-24 13:24:24 -080011504 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11505 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011506 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11508 focalDistance)) {
11509 rc = BAD_VALUE;
11510 }
11511 }
11512
11513 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11514 uint8_t fwk_antibandingMode =
11515 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11516 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11517 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11518 if (NAME_NOT_FOUND != val) {
11519 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011520 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11521 if (m60HzZone) {
11522 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11523 } else {
11524 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11525 }
11526 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11528 hal_antibandingMode)) {
11529 rc = BAD_VALUE;
11530 }
11531 }
11532 }
11533
11534 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11535 int32_t expCompensation = frame_settings.find(
11536 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11537 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11538 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11539 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11540 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011541 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11543 expCompensation)) {
11544 rc = BAD_VALUE;
11545 }
11546 }
11547
11548 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11549 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11551 rc = BAD_VALUE;
11552 }
11553 }
11554 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11555 rc = setHalFpsRange(frame_settings, hal_metadata);
11556 if (rc != NO_ERROR) {
11557 LOGE("setHalFpsRange failed");
11558 }
11559 }
11560
11561 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11562 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11564 rc = BAD_VALUE;
11565 }
11566 }
11567
11568 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11569 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11570 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11571 fwk_effectMode);
11572 if (NAME_NOT_FOUND != val) {
11573 uint8_t effectMode = (uint8_t)val;
11574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11575 rc = BAD_VALUE;
11576 }
11577 }
11578 }
11579
11580 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11581 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11583 colorCorrectMode)) {
11584 rc = BAD_VALUE;
11585 }
11586 }
11587
11588 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11589 cam_color_correct_gains_t colorCorrectGains;
11590 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11591 colorCorrectGains.gains[i] =
11592 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11593 }
11594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11595 colorCorrectGains)) {
11596 rc = BAD_VALUE;
11597 }
11598 }
11599
11600 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11601 cam_color_correct_matrix_t colorCorrectTransform;
11602 cam_rational_type_t transform_elem;
11603 size_t num = 0;
11604 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11605 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11606 transform_elem.numerator =
11607 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11608 transform_elem.denominator =
11609 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11610 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11611 num++;
11612 }
11613 }
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11615 colorCorrectTransform)) {
11616 rc = BAD_VALUE;
11617 }
11618 }
11619
11620 cam_trigger_t aecTrigger;
11621 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11622 aecTrigger.trigger_id = -1;
11623 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11624 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11625 aecTrigger.trigger =
11626 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11627 aecTrigger.trigger_id =
11628 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11630 aecTrigger)) {
11631 rc = BAD_VALUE;
11632 }
11633 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11634 aecTrigger.trigger, aecTrigger.trigger_id);
11635 }
11636
11637 /*af_trigger must come with a trigger id*/
11638 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11639 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11640 cam_trigger_t af_trigger;
11641 af_trigger.trigger =
11642 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11643 af_trigger.trigger_id =
11644 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11646 rc = BAD_VALUE;
11647 }
11648 LOGD("AfTrigger: %d AfTriggerID: %d",
11649 af_trigger.trigger, af_trigger.trigger_id);
11650 }
11651
11652 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11653 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11655 rc = BAD_VALUE;
11656 }
11657 }
11658 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11659 cam_edge_application_t edge_application;
11660 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011661
Thierry Strudel3d639192016-09-09 11:52:26 -070011662 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11663 edge_application.sharpness = 0;
11664 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011665 edge_application.sharpness =
11666 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11667 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11668 int32_t sharpness =
11669 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11670 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11671 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11672 LOGD("Setting edge mode sharpness %d", sharpness);
11673 edge_application.sharpness = sharpness;
11674 }
11675 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 }
11677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11678 rc = BAD_VALUE;
11679 }
11680 }
11681
11682 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11683 int32_t respectFlashMode = 1;
11684 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11685 uint8_t fwk_aeMode =
11686 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11687 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11688 respectFlashMode = 0;
11689 LOGH("AE Mode controls flash, ignore android.flash.mode");
11690 }
11691 }
11692 if (respectFlashMode) {
11693 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11694 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11695 LOGH("flash mode after mapping %d", val);
11696 // To check: CAM_INTF_META_FLASH_MODE usage
11697 if (NAME_NOT_FOUND != val) {
11698 uint8_t flashMode = (uint8_t)val;
11699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11700 rc = BAD_VALUE;
11701 }
11702 }
11703 }
11704 }
11705
11706 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11707 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11709 rc = BAD_VALUE;
11710 }
11711 }
11712
11713 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11714 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11716 flashFiringTime)) {
11717 rc = BAD_VALUE;
11718 }
11719 }
11720
11721 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11722 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11724 hotPixelMode)) {
11725 rc = BAD_VALUE;
11726 }
11727 }
11728
11729 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11730 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11732 lensAperture)) {
11733 rc = BAD_VALUE;
11734 }
11735 }
11736
11737 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11738 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11740 filterDensity)) {
11741 rc = BAD_VALUE;
11742 }
11743 }
11744
11745 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11746 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11748 focalLength)) {
11749 rc = BAD_VALUE;
11750 }
11751 }
11752
11753 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11754 uint8_t optStabMode =
11755 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11757 optStabMode)) {
11758 rc = BAD_VALUE;
11759 }
11760 }
11761
11762 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11763 uint8_t videoStabMode =
11764 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11765 LOGD("videoStabMode from APP = %d", videoStabMode);
11766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11767 videoStabMode)) {
11768 rc = BAD_VALUE;
11769 }
11770 }
11771
11772
11773 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11774 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11776 noiseRedMode)) {
11777 rc = BAD_VALUE;
11778 }
11779 }
11780
11781 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11782 float reprocessEffectiveExposureFactor =
11783 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11784 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11785 reprocessEffectiveExposureFactor)) {
11786 rc = BAD_VALUE;
11787 }
11788 }
11789
11790 cam_crop_region_t scalerCropRegion;
11791 bool scalerCropSet = false;
11792 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11793 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11794 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11795 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11796 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11797
11798 // Map coordinate system from active array to sensor output.
11799 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11800 scalerCropRegion.width, scalerCropRegion.height);
11801
11802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11803 scalerCropRegion)) {
11804 rc = BAD_VALUE;
11805 }
11806 scalerCropSet = true;
11807 }
11808
11809 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11810 int64_t sensorExpTime =
11811 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11812 LOGD("setting sensorExpTime %lld", sensorExpTime);
11813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11814 sensorExpTime)) {
11815 rc = BAD_VALUE;
11816 }
11817 }
11818
11819 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11820 int64_t sensorFrameDuration =
11821 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011822 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11823 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11824 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11825 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11827 sensorFrameDuration)) {
11828 rc = BAD_VALUE;
11829 }
11830 }
11831
11832 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11833 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11834 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11835 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11836 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11837 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11838 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11840 sensorSensitivity)) {
11841 rc = BAD_VALUE;
11842 }
11843 }
11844
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011845#ifndef USE_HAL_3_3
11846 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11847 int32_t ispSensitivity =
11848 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11849 if (ispSensitivity <
11850 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11851 ispSensitivity =
11852 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11853 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11854 }
11855 if (ispSensitivity >
11856 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11857 ispSensitivity =
11858 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11859 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11860 }
11861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11862 ispSensitivity)) {
11863 rc = BAD_VALUE;
11864 }
11865 }
11866#endif
11867
Thierry Strudel3d639192016-09-09 11:52:26 -070011868 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11869 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11870 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11871 rc = BAD_VALUE;
11872 }
11873 }
11874
11875 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11876 uint8_t fwk_facedetectMode =
11877 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11878
11879 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11880 fwk_facedetectMode);
11881
11882 if (NAME_NOT_FOUND != val) {
11883 uint8_t facedetectMode = (uint8_t)val;
11884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11885 facedetectMode)) {
11886 rc = BAD_VALUE;
11887 }
11888 }
11889 }
11890
Thierry Strudel54dc9782017-02-15 12:12:10 -080011891 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011892 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011893 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11895 histogramMode)) {
11896 rc = BAD_VALUE;
11897 }
11898 }
11899
11900 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11901 uint8_t sharpnessMapMode =
11902 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11904 sharpnessMapMode)) {
11905 rc = BAD_VALUE;
11906 }
11907 }
11908
11909 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11910 uint8_t tonemapMode =
11911 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11912 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11913 rc = BAD_VALUE;
11914 }
11915 }
11916 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11917 /*All tonemap channels will have the same number of points*/
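/* Note (added clarification): each ANDROID_TONEMAP_CURVE_* entry is an interleaved list of
 * (Pin, Pout) float pairs, so the point count below is entry.count / 2; for example a
 * two-point linear curve would arrive as {0.0f, 0.0f, 1.0f, 1.0f}. */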
11918 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11919 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11920 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11921 cam_rgb_tonemap_curves tonemapCurves;
11922 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11923 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11924 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11925 tonemapCurves.tonemap_points_cnt,
11926 CAM_MAX_TONEMAP_CURVE_SIZE);
11927 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11928 }
11929
11930 /* ch0 = G*/
11931 size_t point = 0;
11932 cam_tonemap_curve_t tonemapCurveGreen;
11933 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11934 for (size_t j = 0; j < 2; j++) {
11935 tonemapCurveGreen.tonemap_points[i][j] =
11936 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11937 point++;
11938 }
11939 }
11940 tonemapCurves.curves[0] = tonemapCurveGreen;
11941
11942 /* ch 1 = B */
11943 point = 0;
11944 cam_tonemap_curve_t tonemapCurveBlue;
11945 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11946 for (size_t j = 0; j < 2; j++) {
11947 tonemapCurveBlue.tonemap_points[i][j] =
11948 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11949 point++;
11950 }
11951 }
11952 tonemapCurves.curves[1] = tonemapCurveBlue;
11953
11954 /* ch 2 = R */
11955 point = 0;
11956 cam_tonemap_curve_t tonemapCurveRed;
11957 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11958 for (size_t j = 0; j < 2; j++) {
11959 tonemapCurveRed.tonemap_points[i][j] =
11960 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11961 point++;
11962 }
11963 }
11964 tonemapCurves.curves[2] = tonemapCurveRed;
11965
11966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11967 tonemapCurves)) {
11968 rc = BAD_VALUE;
11969 }
11970 }
11971
11972 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11973 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11975 captureIntent)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979
11980 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11981 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11983 blackLevelLock)) {
11984 rc = BAD_VALUE;
11985 }
11986 }
11987
11988 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11989 uint8_t lensShadingMapMode =
11990 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11992 lensShadingMapMode)) {
11993 rc = BAD_VALUE;
11994 }
11995 }
11996
11997 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11998 cam_area_t roi;
11999 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012000 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012001
12002 // Map coordinate system from active array to sensor output.
12003 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12004 roi.rect.height);
12005
12006 if (scalerCropSet) {
12007 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12008 }
12009 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12010 rc = BAD_VALUE;
12011 }
12012 }
12013
12014 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12015 cam_area_t roi;
12016 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012017 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012018
12019 // Map coordinate system from active array to sensor output.
12020 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12021 roi.rect.height);
12022
12023 if (scalerCropSet) {
12024 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12025 }
12026 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12027 rc = BAD_VALUE;
12028 }
12029 }
12030
12031 // CDS for non-HFR non-video mode
12032 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12033 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12034 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12035 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12036 LOGE("Invalid CDS mode %d!", *fwk_cds);
12037 } else {
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12039 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043 }
12044
Thierry Strudel04e026f2016-10-10 11:27:36 -070012045 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012046 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012047 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012048 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12049 }
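// A session-level video HDR enable (m_bVideoHdrEnabled) overrides the per-request
// QCAMERA3_VIDEO_HDR_MODE value, forcing video HDR on for the whole session.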
12050 if (m_bVideoHdrEnabled)
12051 vhdr = CAM_VIDEO_HDR_MODE_ON;
12052
Thierry Strudel54dc9782017-02-15 12:12:10 -080012053 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12054
12055 if(vhdr != curr_hdr_state)
12056 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12057
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012058 rc = setVideoHdrMode(mParameters, vhdr);
12059 if (rc != NO_ERROR) {
12060 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012061 }
12062
12063 //IR
12064 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12065 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12066 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012067 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12068 uint8_t isIRon = 0;
12069
12070 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012071 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12072 LOGE("Invalid IR mode %d!", fwk_ir);
12073 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012074 if(isIRon != curr_ir_state )
12075 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12076
Thierry Strudel04e026f2016-10-10 11:27:36 -070012077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12078 CAM_INTF_META_IR_MODE, fwk_ir)) {
12079 rc = BAD_VALUE;
12080 }
12081 }
12082 }
12083
Thierry Strudel54dc9782017-02-15 12:12:10 -080012084 //Binning Correction Mode
12085 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12086 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12087 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12088 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12089 || (0 > fwk_binning_correction)) {
12090 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12091 } else {
12092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12093 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12094 rc = BAD_VALUE;
12095 }
12096 }
12097 }
12098
Thierry Strudel269c81a2016-10-12 12:13:59 -070012099 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12100 float aec_speed;
12101 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12102 LOGD("AEC Speed :%f", aec_speed);
12103 if ( aec_speed < 0 ) {
12104 LOGE("Invalid AEC mode %f!", aec_speed);
12105 } else {
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12107 aec_speed)) {
12108 rc = BAD_VALUE;
12109 }
12110 }
12111 }
12112
12113 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12114 float awb_speed;
12115 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12116 LOGD("AWB Speed :%f", awb_speed);
12117 if ( awb_speed < 0 ) {
12118 LOGE("Invalid AWB mode %f!", awb_speed);
12119 } else {
12120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12121 awb_speed)) {
12122 rc = BAD_VALUE;
12123 }
12124 }
12125 }
12126
Thierry Strudel3d639192016-09-09 11:52:26 -070012127 // TNR
12128 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12129 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12130 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012131 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012132 cam_denoise_param_t tnr;
12133 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12134 tnr.process_plates =
12135 (cam_denoise_process_type_t)frame_settings.find(
12136 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12137 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012138
12139 if(b_TnrRequested != curr_tnr_state)
12140 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12141
Thierry Strudel3d639192016-09-09 11:52:26 -070012142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12143 rc = BAD_VALUE;
12144 }
12145 }
12146
Thierry Strudel54dc9782017-02-15 12:12:10 -080012147 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012148 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012149 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12151 *exposure_metering_mode)) {
12152 rc = BAD_VALUE;
12153 }
12154 }
12155
Thierry Strudel3d639192016-09-09 11:52:26 -070012156 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12157 int32_t fwk_testPatternMode =
12158 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12159 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12160 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12161
12162 if (NAME_NOT_FOUND != testPatternMode) {
12163 cam_test_pattern_data_t testPatternData;
12164 memset(&testPatternData, 0, sizeof(testPatternData));
12165 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12166 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12167 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12168 int32_t *fwk_testPatternData =
12169 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12170 testPatternData.r = fwk_testPatternData[0];
12171 testPatternData.b = fwk_testPatternData[3];
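// ANDROID_SENSOR_TEST_PATTERN_DATA is supplied in [R, Gr, Gb, B] order; R and B map
// directly, while Gr/Gb are swapped below to match the sensor's color filter arrangement.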
12172 switch (gCamCapability[mCameraId]->color_arrangement) {
12173 case CAM_FILTER_ARRANGEMENT_RGGB:
12174 case CAM_FILTER_ARRANGEMENT_GRBG:
12175 testPatternData.gr = fwk_testPatternData[1];
12176 testPatternData.gb = fwk_testPatternData[2];
12177 break;
12178 case CAM_FILTER_ARRANGEMENT_GBRG:
12179 case CAM_FILTER_ARRANGEMENT_BGGR:
12180 testPatternData.gr = fwk_testPatternData[2];
12181 testPatternData.gb = fwk_testPatternData[1];
12182 break;
12183 default:
12184 LOGE("color arrangement %d is not supported",
12185 gCamCapability[mCameraId]->color_arrangement);
12186 break;
12187 }
12188 }
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12190 testPatternData)) {
12191 rc = BAD_VALUE;
12192 }
12193 } else {
12194 LOGE("Invalid framework sensor test pattern mode %d",
12195 fwk_testPatternMode);
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12200 size_t count = 0;
12201 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12202 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12203 gps_coords.data.d, gps_coords.count, count);
12204 if (gps_coords.count != count) {
12205 rc = BAD_VALUE;
12206 }
12207 }
12208
12209 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12210 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12211 size_t count = 0;
12212 const char *gps_methods_src = (const char *)
12213 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12214 memset(gps_methods, '\0', sizeof(gps_methods));
12215 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12216 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12217 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12218 if (GPS_PROCESSING_METHOD_SIZE != count) {
12219 rc = BAD_VALUE;
12220 }
12221 }
12222
12223 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12224 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12226 gps_timestamp)) {
12227 rc = BAD_VALUE;
12228 }
12229 }
12230
12231 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12232 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12233 cam_rotation_info_t rotation_info;
12234 if (orientation == 0) {
12235 rotation_info.rotation = ROTATE_0;
12236 } else if (orientation == 90) {
12237 rotation_info.rotation = ROTATE_90;
12238 } else if (orientation == 180) {
12239 rotation_info.rotation = ROTATE_180;
12240 } else if (orientation == 270) {
12241 rotation_info.rotation = ROTATE_270;
12242 }
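// ANDROID_JPEG_ORIENTATION is defined to be one of {0, 90, 180, 270}, so exactly one of
// the branches above is expected to have set rotation_info.rotation.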
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012243 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012244 rotation_info.streamId = snapshotStreamId;
12245 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12247 rc = BAD_VALUE;
12248 }
12249 }
12250
12251 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12252 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12254 rc = BAD_VALUE;
12255 }
12256 }
12257
12258 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12259 uint32_t thumb_quality = (uint32_t)
12260 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12262 thumb_quality)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266
12267 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12268 cam_dimension_t dim;
12269 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12270 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12272 rc = BAD_VALUE;
12273 }
12274 }
12275
12276 // Internal metadata
12277 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12278 size_t count = 0;
12279 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12280 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12281 privatedata.data.i32, privatedata.count, count);
12282 if (privatedata.count != count) {
12283 rc = BAD_VALUE;
12284 }
12285 }
12286
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012287 // ISO/Exposure Priority
12288 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12289 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12290 cam_priority_mode_t mode =
12291 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12292 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12293 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12294 use_iso_exp_pty.previewOnly = FALSE;
12295 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12296 use_iso_exp_pty.value = *ptr;
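// The 64-bit priority value is interpreted according to the selected mode: an ISO value
// for CAM_ISO_PRIORITY, an exposure time for CAM_EXP_PRIORITY. ZSL is enabled alongside
// either priority mode and disabled otherwise (see the else branch below).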
12297
12298 if(CAM_ISO_PRIORITY == mode) {
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12300 use_iso_exp_pty)) {
12301 rc = BAD_VALUE;
12302 }
12303 }
12304 else {
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12306 use_iso_exp_pty)) {
12307 rc = BAD_VALUE;
12308 }
12309 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012310
12311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12312 rc = BAD_VALUE;
12313 }
12314 }
12315 } else {
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12317 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012318 }
12319 }
12320
12321 // Saturation
12322 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12323 int32_t* use_saturation =
12324 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12326 rc = BAD_VALUE;
12327 }
12328 }
12329
Thierry Strudel3d639192016-09-09 11:52:26 -070012330 // EV step
12331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12332 gCamCapability[mCameraId]->exp_compensation_step)) {
12333 rc = BAD_VALUE;
12334 }
12335
12336 // CDS info
12337 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12338 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12339 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12340
12341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12342 CAM_INTF_META_CDS_DATA, *cdsData)) {
12343 rc = BAD_VALUE;
12344 }
12345 }
12346
Shuzhen Wang19463d72016-03-08 11:09:52 -080012347 // Hybrid AE
12348 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12349 uint8_t *hybrid_ae = (uint8_t *)
12350 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12351
12352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12353 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12354 rc = BAD_VALUE;
12355 }
12356 }
12357
Shuzhen Wang14415f52016-11-16 18:26:18 -080012358 // Histogram
12359 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12360 uint8_t histogramMode =
12361 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12363 histogramMode)) {
12364 rc = BAD_VALUE;
12365 }
12366 }
12367
12368 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12369 int32_t histogramBins =
12370 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12372 histogramBins)) {
12373 rc = BAD_VALUE;
12374 }
12375 }
12376
Thierry Strudel3d639192016-09-09 11:52:26 -070012377 return rc;
12378}
12379
12380/*===========================================================================
12381 * FUNCTION : captureResultCb
12382 *
12383 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12384 *
12385 * PARAMETERS :
12386 * @frame : frame information from mm-camera-interface
12387 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12388 * @userdata: userdata
12389 *
12390 * RETURN : NONE
12391 *==========================================================================*/
12392void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12393 camera3_stream_buffer_t *buffer,
12394 uint32_t frame_number, bool isInputBuffer, void *userdata)
12395{
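    // Static trampoline: recover the HAL instance from the opaque userdata pointer and
    // forward to the member-function implementation.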
12396 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12397 if (hw == NULL) {
12398 LOGE("Invalid hw %p", hw);
12399 return;
12400 }
12401
12402 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12403 return;
12404}
12405
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012406/*===========================================================================
12407 * FUNCTION : setBufferErrorStatus
12408 *
12409 * DESCRIPTION: Callback handler for channels to report any buffer errors
12410 *
12411 * PARAMETERS :
12412 * @ch : Channel on which buffer error is reported from
12413 * @frame_number : frame number on which buffer error is reported on
12414 * @buffer_status : buffer error status
12415 * @userdata: userdata
12416 *
12417 * RETURN : NONE
12418 *==========================================================================*/
12419void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12420 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12421{
12422 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12423 if (hw == NULL) {
12424 LOGE("Invalid hw %p", hw);
12425 return;
12426 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012427
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012428 hw->setBufferErrorStatus(ch, frame_number, err);
12429 return;
12430}
12431
12432void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12433 uint32_t frameNumber, camera3_buffer_status_t err)
12434{
12435 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12436 pthread_mutex_lock(&mMutex);
12437
12438 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
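    // Mark every pending buffer of this frame that belongs to the reporting channel as
    // ERROR; it will later be returned to the framework with CAMERA3_BUFFER_STATUS_ERROR.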
12439 if (req.frame_number != frameNumber)
12440 continue;
12441 for (auto& k : req.mPendingBufferList) {
12442 if(k.stream->priv == ch) {
12443 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12444 }
12445 }
12446 }
12447
12448 pthread_mutex_unlock(&mMutex);
12449 return;
12450}
Thierry Strudel3d639192016-09-09 11:52:26 -070012451/*===========================================================================
12452 * FUNCTION : initialize
12453 *
12454 * DESCRIPTION: Pass framework callback pointers to HAL
12455 *
12456 * PARAMETERS :
12457 * @device : camera3 device structure
12458 * @callback_ops : framework callback function pointers
12459 * RETURN : Success : 0
12460 * Failure: -ENODEV
12461 *==========================================================================*/
12462
12463int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12464 const camera3_callback_ops_t *callback_ops)
12465{
12466 LOGD("E");
12467 QCamera3HardwareInterface *hw =
12468 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12469 if (!hw) {
12470 LOGE("NULL camera device");
12471 return -ENODEV;
12472 }
12473
12474 int rc = hw->initialize(callback_ops);
12475 LOGD("X");
12476 return rc;
12477}
12478
12479/*===========================================================================
12480 * FUNCTION : configure_streams
12481 *
12482 * DESCRIPTION: Configure the HAL for a new set of framework streams
12483 *
12484 * PARAMETERS :
12485 * @device : camera3 device structure
12486 * @stream_list : stream configuration requested by the framework
12487 * RETURN : Success: 0
12488 * Failure: -EINVAL (if stream configuration is invalid)
12489 * -ENODEV (fatal error)
12490 *==========================================================================*/
12491
12492int QCamera3HardwareInterface::configure_streams(
12493 const struct camera3_device *device,
12494 camera3_stream_configuration_t *stream_list)
12495{
12496 LOGD("E");
12497 QCamera3HardwareInterface *hw =
12498 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12499 if (!hw) {
12500 LOGE("NULL camera device");
12501 return -ENODEV;
12502 }
12503 int rc = hw->configureStreams(stream_list);
12504 LOGD("X");
12505 return rc;
12506}
12507
12508/*===========================================================================
12509 * FUNCTION : construct_default_request_settings
12510 *
12511 * DESCRIPTION: Configure a settings buffer to meet the required use case
12512 *
12513 * @device : camera3 device structure
12514 * @type : capture intent template (CAMERA3_TEMPLATE_*)
12515 *
12516 * RETURN : Success: Return valid metadata
12517 * Failure: Return NULL
12518 *==========================================================================*/
12519const camera_metadata_t* QCamera3HardwareInterface::
12520 construct_default_request_settings(const struct camera3_device *device,
12521 int type)
12522{
12523
12524 LOGD("E");
12525 camera_metadata_t* fwk_metadata = NULL;
12526 QCamera3HardwareInterface *hw =
12527 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12528 if (!hw) {
12529 LOGE("NULL camera device");
12530 return NULL;
12531 }
12532
12533 fwk_metadata = hw->translateCapabilityToMetadata(type);
12534
12535 LOGD("X");
12536 return fwk_metadata;
12537}
12538
12539/*===========================================================================
12540 * FUNCTION : process_capture_request
12541 *
12542 * DESCRIPTION: Submit a new capture request to the HAL; forwarded to orchestrateRequest()
12543 *
12544 * PARAMETERS :
12545 * @device : camera3 device structure
12546 * @request : capture request to process
 *
12547 * RETURN : 0 on success, negative error code on failure
12548 *==========================================================================*/
12549int QCamera3HardwareInterface::process_capture_request(
12550 const struct camera3_device *device,
12551 camera3_capture_request_t *request)
12552{
12553 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012554 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012555 QCamera3HardwareInterface *hw =
12556 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12557 if (!hw) {
12558 LOGE("NULL camera device");
12559 return -EINVAL;
12560 }
12561
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012562 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012563 LOGD("X");
12564 return rc;
12565}
12566
12567/*===========================================================================
12568 * FUNCTION : dump
12569 *
12570 * DESCRIPTION: Dump current HAL state to the given file descriptor (used by dumpsys)
12571 *
12572 * PARAMETERS :
12573 * @device : camera3 device structure
12574 * @fd : file descriptor to write the dump to
 *
12575 * RETURN : None
12576 *==========================================================================*/
12577
12578void QCamera3HardwareInterface::dump(
12579 const struct camera3_device *device, int fd)
12580{
12581 /* Log level property is read when "adb shell dumpsys media.camera" is
12582 called so that the log level can be controlled without restarting
12583 the media server */
12584 getLogLevel();
12585
12586 LOGD("E");
12587 QCamera3HardwareInterface *hw =
12588 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12589 if (!hw) {
12590 LOGE("NULL camera device");
12591 return;
12592 }
12593
12594 hw->dump(fd);
12595 LOGD("X");
12596 return;
12597}
12598
12599/*===========================================================================
12600 * FUNCTION : flush
12601 *
12602 * DESCRIPTION: Flush all in-flight requests and return their buffers/results as errors
12603 *
12604 * PARAMETERS :
12605 * @device : camera3 device structure
12606 *
12607 * RETURN : 0 on success, negative error code on failure
12608 *==========================================================================*/
12609
12610int QCamera3HardwareInterface::flush(
12611 const struct camera3_device *device)
12612{
12613 int rc;
12614 LOGD("E");
12615 QCamera3HardwareInterface *hw =
12616 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12617 if (!hw) {
12618 LOGE("NULL camera device");
12619 return -EINVAL;
12620 }
12621
12622 pthread_mutex_lock(&hw->mMutex);
12623 // Validate current state
12624 switch (hw->mState) {
12625 case STARTED:
12626 /* valid state */
12627 break;
12628
12629 case ERROR:
12630 pthread_mutex_unlock(&hw->mMutex);
12631 hw->handleCameraDeviceError();
12632 return -ENODEV;
12633
12634 default:
12635 LOGI("Flush returned during state %d", hw->mState);
12636 pthread_mutex_unlock(&hw->mMutex);
12637 return 0;
12638 }
12639 pthread_mutex_unlock(&hw->mMutex);
12640
12641 rc = hw->flush(true /* restart channels */ );
12642 LOGD("X");
12643 return rc;
12644}
12645
12646/*===========================================================================
12647 * FUNCTION : close_camera_device
12648 *
12649 * DESCRIPTION: Close the camera device and free the HAL instance
12650 *
12651 * PARAMETERS :
12652 * @device : camera device handle to close
12653 *
12654 * RETURN : NO_ERROR on success, BAD_VALUE if the device handle is NULL
12655 *==========================================================================*/
12656int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12657{
12658 int ret = NO_ERROR;
12659 QCamera3HardwareInterface *hw =
12660 reinterpret_cast<QCamera3HardwareInterface *>(
12661 reinterpret_cast<camera3_device_t *>(device)->priv);
12662 if (!hw) {
12663 LOGE("NULL camera device");
12664 return BAD_VALUE;
12665 }
12666
12667 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12668 delete hw;
12669 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012670 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012671 return ret;
12672}
12673
12674/*===========================================================================
12675 * FUNCTION : getWaveletDenoiseProcessPlate
12676 *
12677 * DESCRIPTION: query wavelet denoise process plate
12678 *
12679 * PARAMETERS : None
12680 *
12681 * RETURN : WNR process plate value
12682 *==========================================================================*/
12683cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12684{
12685 char prop[PROPERTY_VALUE_MAX];
12686 memset(prop, 0, sizeof(prop));
12687 property_get("persist.denoise.process.plates", prop, "0");
12688 int processPlate = atoi(prop);
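    // Property value -> plate mapping (see switch below): 0 = YCbCr plane, 1 = CbCr only,
    // 2 = streamlined YCbCr, 3 = streamlined CbCr. As an illustrative example, on a
    // debuggable build where the property is writable one could select streamlined YCbCr
    // with: adb shell setprop persist.denoise.process.plates 2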
12689 switch(processPlate) {
12690 case 0:
12691 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12692 case 1:
12693 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12694 case 2:
12695 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12696 case 3:
12697 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12698 default:
12699 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12700 }
12701}
12702
12703
12704/*===========================================================================
12705 * FUNCTION : getTemporalDenoiseProcessPlate
12706 *
12707 * DESCRIPTION: query temporal denoise process plate
12708 *
12709 * PARAMETERS : None
12710 *
12711 * RETURN : TNR process plate value
12712 *==========================================================================*/
12713cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12714{
12715 char prop[PROPERTY_VALUE_MAX];
12716 memset(prop, 0, sizeof(prop));
12717 property_get("persist.tnr.process.plates", prop, "0");
12718 int processPlate = atoi(prop);
12719 switch(processPlate) {
12720 case 0:
12721 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12722 case 1:
12723 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12724 case 2:
12725 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12726 case 3:
12727 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12728 default:
12729 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12730 }
12731}
12732
12733
12734/*===========================================================================
12735 * FUNCTION : extractSceneMode
12736 *
12737 * DESCRIPTION: Extract scene mode from frameworks set metadata
12738 *
12739 * PARAMETERS :
12740 * @frame_settings: CameraMetadata reference
12741 * @metaMode: ANDROID_CONTROL_MODE
12742 * @hal_metadata: hal metadata structure
12743 *
12744 * RETURN : NO_ERROR on success, error code otherwise
12745 *==========================================================================*/
12746int32_t QCamera3HardwareInterface::extractSceneMode(
12747 const CameraMetadata &frame_settings, uint8_t metaMode,
12748 metadata_buffer_t *hal_metadata)
12749{
12750 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012751 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12752
12753 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12754 LOGD("Ignoring control mode OFF_KEEP_STATE");
12755 return NO_ERROR;
12756 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012757
12758 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12759 camera_metadata_ro_entry entry =
12760 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12761 if (0 == entry.count)
12762 return rc;
12763
12764 uint8_t fwk_sceneMode = entry.data.u8[0];
12765
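        // Translate the framework ANDROID_CONTROL_SCENE_MODE_* value into the corresponding
        // HAL bestshot mode; unknown values leave sceneMode at CAM_SCENE_MODE_OFF.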
12766 int val = lookupHalName(SCENE_MODES_MAP,
12767 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12768 fwk_sceneMode);
12769 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012770 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012771 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012772 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012773 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012774
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012775 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12776 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12777 }
12778
12779 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12780 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012781 cam_hdr_param_t hdr_params;
12782 hdr_params.hdr_enable = 1;
12783 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12784 hdr_params.hdr_need_1x = false;
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12786 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012790
Thierry Strudel3d639192016-09-09 11:52:26 -070012791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12792 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012796
12797 if (mForceHdrSnapshot) {
12798 cam_hdr_param_t hdr_params;
12799 hdr_params.hdr_enable = 1;
12800 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12801 hdr_params.hdr_need_1x = false;
12802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12803 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12804 rc = BAD_VALUE;
12805 }
12806 }
12807
Thierry Strudel3d639192016-09-09 11:52:26 -070012808 return rc;
12809}
12810
12811/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012812 * FUNCTION : setVideoHdrMode
12813 *
12814 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12815 *
12816 * PARAMETERS :
12817 * @hal_metadata: hal metadata structure
12818 * @vhdr: video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
12819 *
12820 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
12821 *==========================================================================*/
12822int32_t QCamera3HardwareInterface::setVideoHdrMode(
12823 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12824{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012825 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12826 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12827 }
12828
12829 LOGE("Invalid Video HDR mode %d!", vhdr);
12830 return BAD_VALUE;
12831}
12832
12833/*===========================================================================
12834 * FUNCTION : setSensorHDR
12835 *
12836 * DESCRIPTION: Enable/disable sensor HDR.
12837 *
12838 * PARAMETERS :
12839 * @hal_metadata: hal metadata structure
12840 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when called from the video HDR path (m_bSensorHDREnabled is not updated in that case)
12841 *
12842 * RETURN : NO_ERROR on success, BAD_VALUE for an unsupported HDR mode
12843 *==========================================================================*/
12844int32_t QCamera3HardwareInterface::setSensorHDR(
12845 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12846{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012847 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012848 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12849
12850 if (enable) {
12851 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12852 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12853 #ifdef _LE_CAMERA_
12854 //Default to staggered HDR for IOT
12855 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12856 #else
12857 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12858 #endif
12859 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
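        // persist.camera.sensor.hdr selects the sensor HDR flavor as a raw
        // cam_sensor_hdr_type_t value (3 corresponds to staggered HDR, per the IOT default
        // above); the switch below validates the choice against the camera's supported
        // feature mask before applying it.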
12860 }
12861
12862 bool isSupported = false;
12863 switch (sensor_hdr) {
12864 case CAM_SENSOR_HDR_IN_SENSOR:
12865 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12866 CAM_QCOM_FEATURE_SENSOR_HDR) {
12867 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012868 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012869 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012870 break;
12871 case CAM_SENSOR_HDR_ZIGZAG:
12872 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12873 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12874 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012875 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012876 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012877 break;
12878 case CAM_SENSOR_HDR_STAGGERED:
12879 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12880 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12881 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012882 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012883 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012884 break;
12885 case CAM_SENSOR_HDR_OFF:
12886 isSupported = true;
12887 LOGD("Turning off sensor HDR");
12888 break;
12889 default:
12890 LOGE("HDR mode %d not supported", sensor_hdr);
12891 rc = BAD_VALUE;
12892 break;
12893 }
12894
12895 if(isSupported) {
12896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12897 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12898 rc = BAD_VALUE;
12899 } else {
12900 if(!isVideoHdrEnable)
12901 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012902 }
12903 }
12904 return rc;
12905}
12906
12907/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012908 * FUNCTION : needRotationReprocess
12909 *
12910 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12911 *
12912 * PARAMETERS : none
12913 *
12914 * RETURN : true: needed
12915 * false: no need
12916 *==========================================================================*/
12917bool QCamera3HardwareInterface::needRotationReprocess()
12918{
12919 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12920 // current rotation is not zero, and pp has the capability to process rotation
12921 LOGH("need do reprocess for rotation");
12922 return true;
12923 }
12924
12925 return false;
12926}
12927
12928/*===========================================================================
12929 * FUNCTION : needReprocess
12930 *
12931 * DESCRIPTION: if reprocess is needed
12932 *
12933 * PARAMETERS : @postprocess_mask: post-processing feature mask already applied to the frame
12934 *
12935 * RETURN : true: needed
12936 * false: no need
12937 *==========================================================================*/
12938bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12939{
12940 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12941 // TODO: add for ZSL HDR later
12942 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12943 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12944 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12945 return true;
12946 } else {
12947 LOGH("already post processed frame");
12948 return false;
12949 }
12950 }
12951 return needRotationReprocess();
12952}
12953
12954/*===========================================================================
12955 * FUNCTION : needJpegExifRotation
12956 *
12957 * DESCRIPTION: if rotation from jpeg is needed
12958 *
12959 * PARAMETERS : none
12960 *
12961 * RETURN : true: needed
12962 * false: no need
12963 *==========================================================================*/
12964bool QCamera3HardwareInterface::needJpegExifRotation()
12965{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012966 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012967 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12968 LOGD("Need use Jpeg EXIF Rotation");
12969 return true;
12970 }
12971 return false;
12972}
12973
12974/*===========================================================================
12975 * FUNCTION : addOfflineReprocChannel
12976 *
12977 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12978 * coming from input channel
12979 *
12980 * PARAMETERS :
12981 * @config : reprocess configuration
12982 * @inputChHandle : pointer to the input (source) channel
12983 *
12984 *
12985 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12986 *==========================================================================*/
12987QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12988 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12989{
12990 int32_t rc = NO_ERROR;
12991 QCamera3ReprocessChannel *pChannel = NULL;
12992
12993 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012994 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12995 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012996 if (NULL == pChannel) {
12997 LOGE("no mem for reprocess channel");
12998 return NULL;
12999 }
13000
13001 rc = pChannel->initialize(IS_TYPE_NONE);
13002 if (rc != NO_ERROR) {
13003 LOGE("init reprocess channel failed, ret = %d", rc);
13004 delete pChannel;
13005 return NULL;
13006 }
13007
13008 // pp feature config
13009 cam_pp_feature_config_t pp_config;
13010 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13011
13012 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13013 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13014 & CAM_QCOM_FEATURE_DSDN) {
13015 // Use CPP-based DSDN in place of CDS when the hardware supports it.
13016 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13017 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13018 }
13019 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13020 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13021 }
13022
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013023 if (config.hdr_param.hdr_enable) {
13024 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13025 pp_config.hdr_param = config.hdr_param;
13026 }
13027
13028 if (mForceHdrSnapshot) {
13029 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13030 pp_config.hdr_param.hdr_enable = 1;
13031 pp_config.hdr_param.hdr_need_1x = 0;
13032 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13033 }
13034
Thierry Strudel3d639192016-09-09 11:52:26 -070013035 rc = pChannel->addReprocStreamsFromSource(pp_config,
13036 config,
13037 IS_TYPE_NONE,
13038 mMetadataChannel);
13039
13040 if (rc != NO_ERROR) {
13041 delete pChannel;
13042 return NULL;
13043 }
13044 return pChannel;
13045}
13046
13047/*===========================================================================
13048 * FUNCTION : getMobicatMask
13049 *
13050 * DESCRIPTION: returns mobicat mask
13051 *
13052 * PARAMETERS : none
13053 *
13054 * RETURN : mobicat mask
13055 *
13056 *==========================================================================*/
13057uint8_t QCamera3HardwareInterface::getMobicatMask()
13058{
13059 return m_MobicatMask;
13060}
13061
13062/*===========================================================================
13063 * FUNCTION : setMobicat
13064 *
13065 * DESCRIPTION: set Mobicat on/off.
13066 *
13067 * PARAMETERS :
13068 * @params : none
13069 *
13070 * RETURN : int32_t type of status
13071 * NO_ERROR -- success
13072 * none-zero failure code
13073 *==========================================================================*/
13074int32_t QCamera3HardwareInterface::setMobicat()
13075{
13076 char value [PROPERTY_VALUE_MAX];
13077 property_get("persist.camera.mobicat", value, "0");
13078 int32_t ret = NO_ERROR;
13079 uint8_t enableMobi = (uint8_t)atoi(value);
13080
13081 if (enableMobi) {
13082 tune_cmd_t tune_cmd;
13083 tune_cmd.type = SET_RELOAD_CHROMATIX;
13084 tune_cmd.module = MODULE_ALL;
13085 tune_cmd.value = TRUE;
13086 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13087 CAM_INTF_PARM_SET_VFE_COMMAND,
13088 tune_cmd);
13089
13090 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13091 CAM_INTF_PARM_SET_PP_COMMAND,
13092 tune_cmd);
13093 }
13094 m_MobicatMask = enableMobi;
13095
13096 return ret;
13097}
13098
13099/*===========================================================================
13100* FUNCTION : getLogLevel
13101*
13102* DESCRIPTION: Reads the log level property into a variable
13103*
13104* PARAMETERS :
13105* None
13106*
13107* RETURN :
13108* None
13109*==========================================================================*/
13110void QCamera3HardwareInterface::getLogLevel()
13111{
13112 char prop[PROPERTY_VALUE_MAX];
13113 uint32_t globalLogLevel = 0;
13114
13115 property_get("persist.camera.hal.debug", prop, "0");
13116 int val = atoi(prop);
13117 if (0 <= val) {
13118 gCamHal3LogLevel = (uint32_t)val;
13119 }
13120
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013121 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013122 gKpiDebugLevel = atoi(prop);
13123
13124 property_get("persist.camera.global.debug", prop, "0");
13125 val = atoi(prop);
13126 if (0 <= val) {
13127 globalLogLevel = (uint32_t)val;
13128 }
13129
13130 /* Highest log level among hal.logs and global.logs is selected */
13131 if (gCamHal3LogLevel < globalLogLevel)
13132 gCamHal3LogLevel = globalLogLevel;
13133
13134 return;
13135}
13136
13137/*===========================================================================
13138 * FUNCTION : validateStreamRotations
13139 *
13140 * DESCRIPTION: Check if the rotations requested are supported
13141 *
13142 * PARAMETERS :
13143 * @stream_list : streams to be configured
13144 *
13145 * RETURN : NO_ERROR on success
13146 * -EINVAL on failure
13147 *
13148 *==========================================================================*/
13149int QCamera3HardwareInterface::validateStreamRotations(
13150 camera3_stream_configuration_t *streamList)
13151{
13152 int rc = NO_ERROR;
13153
13154 /*
13155 * Loop through all streams requested in configuration
13156 * Check if unsupported rotations have been requested on any of them
13157 */
13158 for (size_t j = 0; j < streamList->num_streams; j++){
13159 camera3_stream_t *newStream = streamList->streams[j];
13160
13161 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13162 bool isImplDef = (newStream->format ==
13163 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13164 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13165 isImplDef);
13166
13167 if (isRotated && (!isImplDef || isZsl)) {
13168 LOGE("Error: Unsupported rotation of %d requested for stream"
13169 "type:%d and stream format:%d",
13170 newStream->rotation, newStream->stream_type,
13171 newStream->format);
13172 rc = -EINVAL;
13173 break;
13174 }
13175 }
13176
13177 return rc;
13178}
13179
13180/*===========================================================================
13181* FUNCTION : getFlashInfo
13182*
13183* DESCRIPTION: Retrieve information about whether the device has a flash.
13184*
13185* PARAMETERS :
13186* @cameraId : Camera id to query
13187* @hasFlash : Boolean indicating whether there is a flash device
13188* associated with given camera
13189* @flashNode : If a flash device exists, this will be its device node.
13190*
13191* RETURN :
13192* None
13193*==========================================================================*/
13194void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13195 bool& hasFlash,
13196 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13197{
13198 cam_capability_t* camCapability = gCamCapability[cameraId];
13199 if (NULL == camCapability) {
13200 hasFlash = false;
13201 flashNode[0] = '\0';
13202 } else {
13203 hasFlash = camCapability->flash_available;
13204 strlcpy(flashNode,
13205 (char*)camCapability->flash_dev_name,
13206 QCAMERA_MAX_FILEPATH_LENGTH);
13207 }
13208}
13209
13210/*===========================================================================
13211* FUNCTION : getEepromVersionInfo
13212*
13213* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13214*
13215* PARAMETERS : None
13216*
13217* RETURN : string describing EEPROM version
13218* "\0" if no such info available
13219*==========================================================================*/
13220const char *QCamera3HardwareInterface::getEepromVersionInfo()
13221{
13222 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13223}
13224
13225/*===========================================================================
13226* FUNCTION : getLdafCalib
13227*
13228* DESCRIPTION: Retrieve Laser AF calibration data
13229*
13230* PARAMETERS : None
13231*
13232* RETURN : Two uint32_t describing laser AF calibration data
13233* NULL if none is available.
13234*==========================================================================*/
13235const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13236{
13237 if (mLdafCalibExist) {
13238 return &mLdafCalib[0];
13239 } else {
13240 return NULL;
13241 }
13242}
13243
13244/*===========================================================================
13245 * FUNCTION : dynamicUpdateMetaStreamInfo
13246 *
13247 * DESCRIPTION: This function:
13248 * (1) stops all the channels
13249 * (2) returns error on pending requests and buffers
13250 * (3) sends metastream_info in setparams
13251 * (4) starts all channels
13252 * This is useful when sensor has to be restarted to apply any
13253 * settings such as frame rate from a different sensor mode
13254 *
13255 * PARAMETERS : None
13256 *
13257 * RETURN : NO_ERROR on success
13258 * Error codes on failure
13259 *
13260 *==========================================================================*/
13261int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13262{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013263 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013264 int rc = NO_ERROR;
13265
13266 LOGD("E");
13267
13268 rc = stopAllChannels();
13269 if (rc < 0) {
13270 LOGE("stopAllChannels failed");
13271 return rc;
13272 }
13273
13274 rc = notifyErrorForPendingRequests();
13275 if (rc < 0) {
13276 LOGE("notifyErrorForPendingRequests failed");
13277 return rc;
13278 }
13279
13280 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13281 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13282 "Format:%d",
13283 mStreamConfigInfo.type[i],
13284 mStreamConfigInfo.stream_sizes[i].width,
13285 mStreamConfigInfo.stream_sizes[i].height,
13286 mStreamConfigInfo.postprocess_mask[i],
13287 mStreamConfigInfo.format[i]);
13288 }
13289
13290 /* Send meta stream info once again so that ISP can start */
13291 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13292 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13293 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13294 mParameters);
13295 if (rc < 0) {
13296 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13297 }
13298
13299 rc = startAllChannels();
13300 if (rc < 0) {
13301 LOGE("startAllChannels failed");
13302 return rc;
13303 }
13304
13305 LOGD("X");
13306 return rc;
13307}
13308
13309/*===========================================================================
13310 * FUNCTION : stopAllChannels
13311 *
13312 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13313 *
13314 * PARAMETERS : None
13315 *
13316 * RETURN : NO_ERROR on success
13317 * Error codes on failure
13318 *
13319 *==========================================================================*/
13320int32_t QCamera3HardwareInterface::stopAllChannels()
13321{
13322 int32_t rc = NO_ERROR;
13323
13324 LOGD("Stopping all channels");
13325 // Stop the Streams/Channels
13326 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13327 it != mStreamInfo.end(); it++) {
13328 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13329 if (channel) {
13330 channel->stop();
13331 }
13332 (*it)->status = INVALID;
13333 }
13334
13335 if (mSupportChannel) {
13336 mSupportChannel->stop();
13337 }
13338 if (mAnalysisChannel) {
13339 mAnalysisChannel->stop();
13340 }
13341 if (mRawDumpChannel) {
13342 mRawDumpChannel->stop();
13343 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013344 if (mHdrPlusRawSrcChannel) {
13345 mHdrPlusRawSrcChannel->stop();
13346 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013347 if (mMetadataChannel) {
13348 /* If content of mStreamInfo is not 0, there is metadata stream */
13349 mMetadataChannel->stop();
13350 }
13351
13352 LOGD("All channels stopped");
13353 return rc;
13354}
13355
13356/*===========================================================================
13357 * FUNCTION : startAllChannels
13358 *
13359 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13360 *
13361 * PARAMETERS : None
13362 *
13363 * RETURN : NO_ERROR on success
13364 * Error codes on failure
13365 *
13366 *==========================================================================*/
13367int32_t QCamera3HardwareInterface::startAllChannels()
13368{
13369 int32_t rc = NO_ERROR;
13370
13371 LOGD("Start all channels ");
13372 // Start the Streams/Channels
13373 if (mMetadataChannel) {
13374 /* If content of mStreamInfo is not 0, there is metadata stream */
13375 rc = mMetadataChannel->start();
13376 if (rc < 0) {
13377 LOGE("META channel start failed");
13378 return rc;
13379 }
13380 }
13381 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13382 it != mStreamInfo.end(); it++) {
13383 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13384 if (channel) {
13385 rc = channel->start();
13386 if (rc < 0) {
13387 LOGE("channel start failed");
13388 return rc;
13389 }
13390 }
13391 }
13392 if (mAnalysisChannel) {
13393 mAnalysisChannel->start();
13394 }
13395 if (mSupportChannel) {
13396 rc = mSupportChannel->start();
13397 if (rc < 0) {
13398 LOGE("Support channel start failed");
13399 return rc;
13400 }
13401 }
13402 if (mRawDumpChannel) {
13403 rc = mRawDumpChannel->start();
13404 if (rc < 0) {
13405 LOGE("RAW dump channel start failed");
13406 return rc;
13407 }
13408 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013409 if (mHdrPlusRawSrcChannel) {
13410 rc = mHdrPlusRawSrcChannel->start();
13411 if (rc < 0) {
13412 LOGE("HDR+ RAW channel start failed");
13413 return rc;
13414 }
13415 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013416
13417 LOGD("All channels started");
13418 return rc;
13419}
13420
13421/*===========================================================================
13422 * FUNCTION : notifyErrorForPendingRequests
13423 *
13424 * DESCRIPTION: This function sends error for all the pending requests/buffers
13425 *
13426 * PARAMETERS : None
13427 *
13428 * RETURN : Error codes
13429 * NO_ERROR on success
13430 *
13431 *==========================================================================*/
13432int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13433{
13434 int32_t rc = NO_ERROR;
13435 unsigned int frameNum = 0;
13436 camera3_capture_result_t result;
13437 camera3_stream_buffer_t *pStream_Buf = NULL;
13438
13439 memset(&result, 0, sizeof(camera3_capture_result_t));
13440
13441 if (mPendingRequestsList.size() > 0) {
13442 pendingRequestIterator i = mPendingRequestsList.begin();
13443 frameNum = i->frame_number;
13444 } else {
13445 /* There might still be pending buffers even though there are
13446 no pending requests. Setting the frameNum to MAX so that
13447 all the buffers with smaller frame numbers are returned */
13448 frameNum = UINT_MAX;
13449 }
13450
13451 LOGH("Oldest frame num on mPendingRequestsList = %u",
13452 frameNum);
13453
Emilian Peev7650c122017-01-19 08:24:33 -080013454 notifyErrorFoPendingDepthData(mDepthChannel);
13455
Thierry Strudel3d639192016-09-09 11:52:26 -070013456 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13457 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13458
13459 if (req->frame_number < frameNum) {
13460 // Send Error notify to frameworks for each buffer for which
13461 // metadata buffer is already sent
13462 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13463 req->frame_number, req->mPendingBufferList.size());
13464
13465 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13466 if (NULL == pStream_Buf) {
13467 LOGE("No memory for pending buffers array");
13468 return NO_MEMORY;
13469 }
13470 memset(pStream_Buf, 0,
13471 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13472 result.result = NULL;
13473 result.frame_number = req->frame_number;
13474 result.num_output_buffers = req->mPendingBufferList.size();
13475 result.output_buffers = pStream_Buf;
13476
13477 size_t index = 0;
13478 for (auto info = req->mPendingBufferList.begin();
13479 info != req->mPendingBufferList.end(); ) {
13480
13481 camera3_notify_msg_t notify_msg;
13482 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13483 notify_msg.type = CAMERA3_MSG_ERROR;
13484 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13485 notify_msg.message.error.error_stream = info->stream;
13486 notify_msg.message.error.frame_number = req->frame_number;
13487 pStream_Buf[index].acquire_fence = -1;
13488 pStream_Buf[index].release_fence = -1;
13489 pStream_Buf[index].buffer = info->buffer;
13490 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13491 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013492 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013493 index++;
13494 // Remove buffer from list
13495 info = req->mPendingBufferList.erase(info);
13496 }
13497
13498 // Remove this request from Map
13499 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13500 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13501 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13502
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013503 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013504
13505 delete [] pStream_Buf;
13506 } else {
13507
13508 // Go through the pending requests info and send error request to framework
13509 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13510
13511 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13512
13513 // Send error notify to frameworks
13514 camera3_notify_msg_t notify_msg;
13515 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13516 notify_msg.type = CAMERA3_MSG_ERROR;
13517 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13518 notify_msg.message.error.error_stream = NULL;
13519 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013520 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013521
13522 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13523 if (NULL == pStream_Buf) {
13524 LOGE("No memory for pending buffers array");
13525 return NO_MEMORY;
13526 }
13527 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13528
13529 result.result = NULL;
13530 result.frame_number = req->frame_number;
13531 result.input_buffer = i->input_buffer;
13532 result.num_output_buffers = req->mPendingBufferList.size();
13533 result.output_buffers = pStream_Buf;
13534
13535 size_t index = 0;
13536 for (auto info = req->mPendingBufferList.begin();
13537 info != req->mPendingBufferList.end(); ) {
13538 pStream_Buf[index].acquire_fence = -1;
13539 pStream_Buf[index].release_fence = -1;
13540 pStream_Buf[index].buffer = info->buffer;
13541 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13542 pStream_Buf[index].stream = info->stream;
13543 index++;
13544 // Remove buffer from list
13545 info = req->mPendingBufferList.erase(info);
13546 }
13547
13548 // Remove this request from Map
13549 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13550 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13551 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13552
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013553 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013554 delete [] pStream_Buf;
13555 i = erasePendingRequest(i);
13556 }
13557 }
13558
13559 /* Reset pending frame Drop list and requests list */
13560 mPendingFrameDropList.clear();
13561
13562 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13563 req.mPendingBufferList.clear();
13564 }
13565 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013566 LOGH("Cleared all the pending buffers ");
13567
13568 return rc;
13569}
13570
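/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Whether a stream of the given dimensions must be routed to the
 *              encoder path, i.e. it is larger than the maximum viewfinder
 *              size or exceeds 4K in either dimension.
 *
 * PARAMETERS : @max_viewfinder_size: maximum viewfinder dimensions
 *              @width: stream width
 *              @height: stream height
 *
 * RETURN     : true if the stream is on the encoder path, false otherwise
 *==========================================================================*/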
13571bool QCamera3HardwareInterface::isOnEncoder(
13572 const cam_dimension_t max_viewfinder_size,
13573 uint32_t width, uint32_t height)
13574{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013575 return ((width > (uint32_t)max_viewfinder_size.width) ||
13576 (height > (uint32_t)max_viewfinder_size.height) ||
13577 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13578 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013579}
13580
13581/*===========================================================================
13582 * FUNCTION : setBundleInfo
13583 *
13584 * DESCRIPTION: Set bundle info for all streams that are bundled.
13585 *
13586 * PARAMETERS : None
13587 *
13588 * RETURN : NO_ERROR on success
13589 * Error codes on failure
13590 *==========================================================================*/
13591int32_t QCamera3HardwareInterface::setBundleInfo()
13592{
13593 int32_t rc = NO_ERROR;
13594
13595 if (mChannelHandle) {
13596 cam_bundle_config_t bundleInfo;
13597 memset(&bundleInfo, 0, sizeof(bundleInfo));
13598 rc = mCameraHandle->ops->get_bundle_info(
13599 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13600 if (rc != NO_ERROR) {
13601 LOGE("get_bundle_info failed");
13602 return rc;
13603 }
13604 if (mAnalysisChannel) {
13605 mAnalysisChannel->setBundleInfo(bundleInfo);
13606 }
13607 if (mSupportChannel) {
13608 mSupportChannel->setBundleInfo(bundleInfo);
13609 }
13610 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13611 it != mStreamInfo.end(); it++) {
13612 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13613 channel->setBundleInfo(bundleInfo);
13614 }
13615 if (mRawDumpChannel) {
13616 mRawDumpChannel->setBundleInfo(bundleInfo);
13617 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013618 if (mHdrPlusRawSrcChannel) {
13619 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13620 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013621 }
13622
13623 return rc;
13624}
13625
13626/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013627 * FUNCTION : setInstantAEC
13628 *
13629 * DESCRIPTION: Set Instant AEC related params.
13630 *
13631 * PARAMETERS :
13632 * @meta: CameraMetadata reference
13633 *
13634 * RETURN : NO_ERROR on success
13635 * Error codes on failure
13636 *==========================================================================*/
13637int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13638{
13639 int32_t rc = NO_ERROR;
13640 uint8_t val = 0;
13641 char prop[PROPERTY_VALUE_MAX];
13642
13643 // First try to configure instant AEC from framework metadata
13644 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13645 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13646 }
13647
13648 // If the framework did not set this value, try to read it from the system property.
13649 if (val == 0) {
13650 memset(prop, 0, sizeof(prop));
13651 property_get("persist.camera.instant.aec", prop, "0");
13652 val = (uint8_t)atoi(prop);
13653 }
13654
13655 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13656 (val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13657 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13658 mInstantAEC = val;
13659 mInstantAECSettledFrameNumber = 0;
13660 mInstantAecFrameIdxCount = 0;
13661 LOGH("instantAEC value set %d",val);
13662 if (mInstantAEC) {
13663 memset(prop, 0, sizeof(prop));
13664 property_get("persist.camera.ae.instant.bound", prop, "10");
13665 int32_t aec_frame_skip_cnt = atoi(prop);
13666 if (aec_frame_skip_cnt >= 0) {
13667 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13668 } else {
13669 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13670 rc = BAD_VALUE;
13671 }
13672 }
13673 } else {
13674 LOGE("Bad instant aec value set %d", val);
13675 rc = BAD_VALUE;
13676 }
13677 return rc;
13678}
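// Illustrative (a sketch, not code from this HAL): instant AEC can also be forced
// from a debug shell, e.g.
//     adb shell setprop persist.camera.instant.aec 1
// where the value must fall inside the cam_instant_aec_mode enum range checked above.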
13679
13680/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013681 * FUNCTION : get_num_overall_buffers
13682 *
13683 * DESCRIPTION: Estimate number of pending buffers across all requests.
13684 *
13685 * PARAMETERS : None
13686 *
13687 * RETURN : Number of overall pending buffers
13688 *
13689 *==========================================================================*/
13690uint32_t PendingBuffersMap::get_num_overall_buffers()
13691{
13692 uint32_t sum_buffers = 0;
13693 for (auto &req : mPendingBuffersInRequest) {
13694 sum_buffers += req.mPendingBufferList.size();
13695 }
13696 return sum_buffers;
13697}
13698
13699/*===========================================================================
13700 * FUNCTION : removeBuf
13701 *
13702 * DESCRIPTION: Remove a matching buffer from the tracker.
13703 *
13704 * PARAMETERS : @buffer: image buffer for the callback
13705 *
13706 * RETURN : None
13707 *
13708 *==========================================================================*/
13709void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13710{
13711 bool buffer_found = false;
13712 for (auto req = mPendingBuffersInRequest.begin();
13713 req != mPendingBuffersInRequest.end(); req++) {
13714 for (auto k = req->mPendingBufferList.begin();
13715 k != req->mPendingBufferList.end(); k++ ) {
13716 if (k->buffer == buffer) {
13717 LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
13718 req->frame_number, buffer);
13719 k = req->mPendingBufferList.erase(k);
13720 if (req->mPendingBufferList.empty()) {
13721 // Remove this request from Map
13722 req = mPendingBuffersInRequest.erase(req);
13723 }
13724 buffer_found = true;
13725 break;
13726 }
13727 }
13728 if (buffer_found) {
13729 break;
13730 }
13731 }
13732 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13733 get_num_overall_buffers());
13734}
13735
13736/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013737 * FUNCTION : getBufErrStatus
13738 *
13739 * DESCRIPTION: Get buffer error status
13740 *
13741 * PARAMETERS : @buffer: buffer handle
13742 *
13743 * RETURN : Error status
13744 *
13745 *==========================================================================*/
13746int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13747{
13748 for (auto& req : mPendingBuffersInRequest) {
13749 for (auto& k : req.mPendingBufferList) {
13750 if (k.buffer == buffer)
13751 return k.bufStatus;
13752 }
13753 }
13754 return CAMERA3_BUFFER_STATUS_OK;
13755}
13756
13757/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013758 * FUNCTION : setPAAFSupport
13759 *
13760 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13761 * feature mask according to stream type and filter
13762 * arrangement
13763 *
13764 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13765 * @stream_type: stream type
13766 * @filter_arrangement: filter arrangement
13767 *
13768 * RETURN : None
13769 *==========================================================================*/
13770void QCamera3HardwareInterface::setPAAFSupport(
13771 cam_feature_mask_t& feature_mask,
13772 cam_stream_type_t stream_type,
13773 cam_color_filter_arrangement_t filter_arrangement)
13774{
Thierry Strudel3d639192016-09-09 11:52:26 -070013775 switch (filter_arrangement) {
13776 case CAM_FILTER_ARRANGEMENT_RGGB:
13777 case CAM_FILTER_ARRANGEMENT_GRBG:
13778 case CAM_FILTER_ARRANGEMENT_GBRG:
13779 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013780 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13781 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013782 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013783 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13784 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070013785 }
13786 break;
13787 case CAM_FILTER_ARRANGEMENT_Y:
13788 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13789 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13790 }
13791 break;
13792 default:
13793 break;
13794 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070013795 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13796 feature_mask, stream_type, filter_arrangement);
13797
13798
Thierry Strudel3d639192016-09-09 11:52:26 -070013799}
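// Illustrative usage (a sketch, not code lifted from this HAL): when deriving the
// postprocess feature mask for a new preview stream, a caller might do:
//     cam_feature_mask_t mask = 0;
//     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
//             gCamCapability[mCameraId]->color_arrangement);
// (assuming the capability struct exposes the sensor's color_arrangement).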
13800
13801/*===========================================================================
13802* FUNCTION : getSensorMountAngle
13803*
13804* DESCRIPTION: Retrieve sensor mount angle
13805*
13806* PARAMETERS : None
13807*
13808* RETURN : sensor mount angle in uint32_t
13809*==========================================================================*/
13810uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13811{
13812 return gCamCapability[mCameraId]->sensor_mount_angle;
13813}
13814
13815/*===========================================================================
13816* FUNCTION : getRelatedCalibrationData
13817*
13818* DESCRIPTION: Retrieve related system calibration data
13819*
13820* PARAMETERS : None
13821*
13822* RETURN : Pointer of related system calibration data
13823*==========================================================================*/
13824const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13825{
13826 return (const cam_related_system_calibration_data_t *)
13827 &(gCamCapability[mCameraId]->related_cam_calibration);
13828}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013829
13830/*===========================================================================
13831 * FUNCTION : is60HzZone
13832 *
13833 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
13834 *
13835 * PARAMETERS : None
13836 *
13837 * RETURN : True if in 60Hz zone, False otherwise
13838 *==========================================================================*/
13839bool QCamera3HardwareInterface::is60HzZone()
13840{
13841 time_t t = time(NULL);
13842 struct tm lt;
13843
13844 struct tm* r = localtime_r(&t, &lt);
13845
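    // Heuristic: UTC offsets at or west of UTC-2 (the Americas) and at or east of
    // UTC+8 are treated as 60Hz regions; offsets in between are treated as 50Hz.
    // If local time cannot be determined, default to 60Hz.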
13846 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13847 return true;
13848 else
13849 return false;
13850}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013851
13852/*===========================================================================
13853 * FUNCTION : adjustBlackLevelForCFA
13854 *
13855 * DESCRIPTION: Adjust the black level pattern from RGGB order to the order of
13856 * the Bayer CFA (Color Filter Array).
13857 *
13858 * PARAMETERS : @input: black level pattern in the order of RGGB
13859 * @output: black level pattern in the order of CFA
13860 * @color_arrangement: CFA color arrangement
13861 *
13862 * RETURN : None
13863 *==========================================================================*/
13864template<typename T>
13865void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13866 T input[BLACK_LEVEL_PATTERN_CNT],
13867 T output[BLACK_LEVEL_PATTERN_CNT],
13868 cam_color_filter_arrangement_t color_arrangement)
13869{
13870 switch (color_arrangement) {
13871 case CAM_FILTER_ARRANGEMENT_GRBG:
13872 output[0] = input[1];
13873 output[1] = input[0];
13874 output[2] = input[3];
13875 output[3] = input[2];
13876 break;
13877 case CAM_FILTER_ARRANGEMENT_GBRG:
13878 output[0] = input[2];
13879 output[1] = input[3];
13880 output[2] = input[0];
13881 output[3] = input[1];
13882 break;
13883 case CAM_FILTER_ARRANGEMENT_BGGR:
13884 output[0] = input[3];
13885 output[1] = input[2];
13886 output[2] = input[1];
13887 output[3] = input[0];
13888 break;
13889 case CAM_FILTER_ARRANGEMENT_RGGB:
13890 output[0] = input[0];
13891 output[1] = input[1];
13892 output[2] = input[2];
13893 output[3] = input[3];
13894 break;
13895 default:
13896 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13897 break;
13898 }
13899}
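// Worked example: with input = {R, Gr, Gb, B} (RGGB order) and a GRBG sensor,
// the output is {Gr, R, B, Gb}, i.e. the four values are reordered to match the
// top-left 2x2 cell of the sensor's color filter array.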
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013900
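/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy the JPEG and capture intent settings of the original HDR+
 *              still capture request into the HDR+ result metadata, since the
 *              result metadata originates from a ZSL buffer.
 *
 * PARAMETERS : @resultMetadata: result metadata to be updated
 *              @settings: HAL metadata of the original still capture request
 *
 * RETURN     : None
 *==========================================================================*/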
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013901void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13902 CameraMetadata &resultMetadata,
13903 std::shared_ptr<metadata_buffer_t> settings)
13904{
13905 if (settings == nullptr) {
13906 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13907 return;
13908 }
13909
13910 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13911 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13912 }
13913
13914 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13915 String8 str((const char *)gps_methods);
13916 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13917 }
13918
13919 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13920 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13921 }
13922
13923 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13924 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13925 }
13926
13927 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13928 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13929 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13930 }
13931
13932 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13933 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13934 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13935 }
13936
13937 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13938 int32_t fwk_thumb_size[2];
13939 fwk_thumb_size[0] = thumb_size->width;
13940 fwk_thumb_size[1] = thumb_size->height;
13941 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13942 }
13943
13944 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13945 uint8_t fwk_intent = intent[0];
13946 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13947 }
13948}
13949
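/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequest
 *
 * DESCRIPTION: Check whether a capture request qualifies for HDR+ (high
 *              quality noise reduction and edge modes, single JPEG output)
 *              and, if so, get a YUV buffer from the pic channel and submit
 *              the request to the HDR+ service.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to be filled in
 *              @request: capture request from the framework
 *              @metadata: request settings
 *
 * RETURN     : true if the request was submitted as an HDR+ request
 *              false otherwise
 *==========================================================================*/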
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013950bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13951 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13952{
13953 if (hdrPlusRequest == nullptr) return false;
13954
13955 // Check noise reduction mode is high quality.
13956 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13957 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13958 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013959 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13960 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013961 return false;
13962 }
13963
13964 // Check edge mode is high quality.
13965 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13966 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13967 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13968 return false;
13969 }
13970
13971 if (request.num_output_buffers != 1 ||
13972 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13973 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013974 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13975 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13976 request.output_buffers[i].stream->width,
13977 request.output_buffers[i].stream->height,
13978 request.output_buffers[i].stream->format);
13979 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013980 return false;
13981 }
13982
13983 // Get a YUV buffer from pic channel.
13984 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13985 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13986 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13987 if (res != OK) {
13988 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13989 __FUNCTION__, strerror(-res), res);
13990 return false;
13991 }
13992
13993 pbcamera::StreamBuffer buffer;
13994 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080013995 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013996 buffer.data = yuvBuffer->buffer;
13997 buffer.dataSize = yuvBuffer->frame_len;
13998
13999 pbcamera::CaptureRequest pbRequest;
14000 pbRequest.id = request.frame_number;
14001 pbRequest.outputBuffers.push_back(buffer);
14002
14003 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014004 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014005 if (res != OK) {
14006 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14007 strerror(-res), res);
14008 return false;
14009 }
14010
14011 hdrPlusRequest->yuvBuffer = yuvBuffer;
14012 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14013
14014 return true;
14015}
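// Illustrative caller (a sketch under the assumption that pending HDR+ requests
// are keyed by frame number; not code lifted from this file):
//     HdrPlusPendingRequest hdrPlusRequest;
//     if (trySubmittingHdrPlusRequest(&hdrPlusRequest, *request, meta)) {
//         Mutex::Autolock l(mHdrPlusPendingRequestsLock);
//         mHdrPlusPendingRequests.emplace(request->frame_number, hdrPlusRequest);
//     }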
14016
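/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Connect to the HDR+ service if not already connected, set the
 *              static metadata, configure HDR+ streams, and enable ZSL HDR+
 *              mode so Easel starts capturing ZSL RAW buffers. Caller is
 *              expected to hold the HAL lock (per the Locked naming convention).
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/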
Chien-Yu Chenee335912017-02-09 17:53:20 -080014017status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14018{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014019 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014020 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
14021 return -ENODEV;
14022 }
14023
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014024 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014025
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014026 // Connect to HDR+ service if it's not connected yet.
14027 pthread_mutex_lock(&gCamLock);
14028 if (!gEaselConnected) {
14029 // Connect to HDR+ service
14030 res = gHdrPlusClient->connect(this);
14031 if (res != OK) {
14032 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14033 strerror(-res), res);
14034 pthread_mutex_unlock(&gCamLock);
14035 return res;
14036 }
14037
14038 // Set static metadata.
14039 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14040 if (res != OK) {
14041 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14042 strerror(-res), res);
14043 gHdrPlusClient->disconnect();
14044 pthread_mutex_unlock(&gCamLock);
14045 return res;
14046 }
14047 gEaselConnected = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014048 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014049 pthread_mutex_unlock(&gCamLock);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014050
14051 // Configure stream for HDR+.
14052 res = configureHdrPlusStreamsLocked();
14053 if (res != OK) {
14054 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014055 return res;
14056 }
14057
14058 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14059 res = gHdrPlusClient->setZslHdrPlusMode(true);
14060 if (res != OK) {
14061 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014062 return res;
14063 }
14064
14065 mHdrPlusModeEnabled = true;
14066 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14067
14068 return OK;
14069}
14070
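/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable ZSL HDR+ mode in the HDR+ client, if it was enabled.
 *              Caller is expected to hold the HAL lock (per the Locked naming
 *              convention).
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/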
14071void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14072{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014073 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014074 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014075 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14076 if (res != OK) {
14077 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14078 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014079 }
14080
14081 mHdrPlusModeEnabled = false;
14082 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14083}
14084
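/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure HDR+ client streams: the input is either the HAL's
 *              RAW10 source channel or the sensor itself (MIPI data sent to
 *              Easel), and the output is the YUV stream backed by the pic
 *              channel.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/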
14085status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014086{
14087 pbcamera::InputConfiguration inputConfig;
14088 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14089 status_t res = OK;
14090
14091 // Configure HDR+ client streams.
14092 // Get input config.
14093 if (mHdrPlusRawSrcChannel) {
14094 // HDR+ input buffers will be provided by HAL.
14095 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14096 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14097 if (res != OK) {
14098 LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14099 __FUNCTION__, strerror(-res), res);
14100 return res;
14101 }
14102
14103 inputConfig.isSensorInput = false;
14104 } else {
14105 // Sensor MIPI will send data to Easel.
14106 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014107 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014108 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14109 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14110 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14111 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14112 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14113 if (mSensorModeInfo.num_raw_bits != 10) {
14114 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14115 mSensorModeInfo.num_raw_bits);
14116 return BAD_VALUE;
14117 }
14118
14119 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014120 }
14121
14122 // Get output configurations.
14123 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014124 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014125
14126 // Easel may need to output YUV output buffers if mPictureChannel was created.
14127 pbcamera::StreamConfiguration yuvOutputConfig;
14128 if (mPictureChannel != nullptr) {
14129 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14130 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14131 if (res != OK) {
14132 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14133 __FUNCTION__, strerror(-res), res);
14134
14135 return res;
14136 }
14137
14138 outputStreamConfigs.push_back(yuvOutputConfig);
14139 }
14140
14141 // TODO: consider other channels for YUV output buffers.
14142
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014143 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014144 if (res != OK) {
14145 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14146 strerror(-res), res);
14147 return res;
14148 }
14149
14150 return OK;
14151}
14152
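/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: Callback from the HDR+ client for a successful capture result.
 *              Updates the result metadata with the original request settings,
 *              optionally dumps the YUV output, returns the YUV buffer to the
 *              pic channel for JPEG encoding, and sends the result metadata to
 *              the framework.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: result metadata of the ZSL input buffer
 *
 * RETURN     : None
 *==========================================================================*/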
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014153void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14154 const camera_metadata_t &resultMetadata) {
14155 if (result != nullptr) {
14156 if (result->outputBuffers.size() != 1) {
14157 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14158 result->outputBuffers.size());
14159 return;
14160 }
14161
14162 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14163 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14164 result->outputBuffers[0].streamId);
14165 return;
14166 }
14167
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014168 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014169 HdrPlusPendingRequest pendingRequest;
14170 {
14171 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14172 auto req = mHdrPlusPendingRequests.find(result->requestId);
14173 pendingRequest = req->second;
14174 }
14175
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014176 // Update the result metadata with the settings of the HDR+ still capture request because
14177 // the result metadata belongs to a ZSL buffer.
14178 CameraMetadata metadata;
14179 metadata = &resultMetadata;
14180 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14181 camera_metadata_t* updatedResultMetadata = metadata.release();
14182
14183 QCamera3PicChannel *picChannel =
14184 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14185
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014186 // Check if dumping HDR+ YUV output is enabled.
14187 char prop[PROPERTY_VALUE_MAX];
14188 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14189 bool dumpYuvOutput = atoi(prop);
14190
14191 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014192 // Dump yuv buffer to a ppm file.
14193 pbcamera::StreamConfiguration outputConfig;
14194 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14195 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14196 if (rc == OK) {
14197 char buf[FILENAME_MAX] = {};
14198 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14199 result->requestId, result->outputBuffers[0].streamId,
14200 outputConfig.image.width, outputConfig.image.height);
14201
14202 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14203 } else {
14204 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14205 __FUNCTION__, strerror(-rc), rc);
14206 }
14207 }
14208
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014209 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14210 auto halMetadata = std::make_shared<metadata_buffer_t>();
14211 clear_metadata_buffer(halMetadata.get());
14212
14213 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14214 // encoding.
14215 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14216 halStreamId, /*minFrameDuration*/0);
14217 if (res == OK) {
14218 // Return the buffer to pic channel for encoding.
14219 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14220 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14221 halMetadata);
14222 } else {
14223 // Return the buffer without encoding.
14224 // TODO: This should not happen but we may want to report an error buffer to camera
14225 // service.
14226 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14227 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14228 strerror(-res), res);
14229 }
14230
14231 // Send HDR+ metadata to framework.
14232 {
14233 pthread_mutex_lock(&mMutex);
14234
14235 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14236 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14237 pthread_mutex_unlock(&mMutex);
14238 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014239
14240 // Remove the HDR+ pending request.
14241 {
14242 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14243 auto req = mHdrPlusPendingRequests.find(result->requestId);
14244 mHdrPlusPendingRequests.erase(req);
14245 }
14246 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014247}
14248
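/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: Callback from the HDR+ client for a failed capture result.
 *              Returns the YUV buffer to the pic channel and removes the
 *              pending HDR+ request.
 *
 * PARAMETERS : @failedResult: failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/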
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014249void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14250 // TODO: Handle HDR+ capture failures and send the failure to framework.
14251 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14252 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14253
14254 // Return the buffer to pic channel.
14255 QCamera3PicChannel *picChannel =
14256 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14257 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14258
14259 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014260}
14261
Thierry Strudel3d639192016-09-09 11:52:26 -070014262}; //end namespace qcamera