/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
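/*
 * Usage note: METADATA_MAP_SIZE() just yields the element count of one of the
 * statically sized mapping tables defined further below, e.g.
 *     size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP);
 * so a lookup can walk a table without a separate count constant.
 */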

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
// If Easel is connected.
bool gEaselConnected;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

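/*
 * The QCameraMap tables above translate between android.* metadata enum values
 * and the vendor (cam_*) enums used by mm-camera. They are intentionally plain
 * arrays: translation traverses a table from lower to higher index in either
 * direction, which is why the comment above REFERENCE_ILLUMINANT_MAP warns
 * that ordering matters when several HAL values map to one Android value.
 */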
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// Initialise sessionId[] to an invalid sentinel value; closeCamera() resets an
// entry back to this value when its session goes away.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

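/*
 * Note on the NULL entries in mCameraOps above: register_stream_buffers and
 * get_metadata_vendor_tag_ops are legacy camera3_device_ops hooks that are
 * deprecated for camera devices of this HAL version, so they are deliberately
 * left unimplemented and the framework is not expected to call them.
 */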
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

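/*
 * The persist.camera.* properties read in the constructor above are debug and
 * tuning knobs. They are sampled once per QCamera3HardwareInterface instance,
 * so a change only takes effect the next time the camera is opened. For
 * example, RAW dumping can be enabled from a shell (assuming a debuggable
 * build) with:
 *     adb shell setprop persist.camera.raw.dump 1
 */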
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

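/*
 * Teardown order in the destructor above: unlink any dual-camera bundle first,
 * then stop every channel before deleting it, send a final "unconfigure"
 * CAM_INTF_META_STREAM_INFO to the backend, delete the backend channel handle,
 * close the camera if it is still open, and only then release the remaining
 * pending-request bookkeeping and synchronization objects.
 */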
/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

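/*
 * camEvtHandle() runs in mm-camera-interface's callback context: a daemon
 * death marks the session as ERROR so later framework calls fail fast, while
 * a pull request sets mWokenUpByDaemon and calls unblockRequestIfNecessary(),
 * presumably to release a caller blocked waiting for in-flight requests to
 * drain.
 */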
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    if (gHdrPlusClient != nullptr) {
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return rc;
}

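/*
 * persist.camera.hdrplus.apinput is a debug property: when an Easel HDR+
 * client is present it selects whether HDR+ input comes from the AP instead
 * of Easel's own capture path (see the mIsApInputUsedForHdrPlus handling
 * above).
 */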
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

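/*
 * The dual-camera command buffer allocated above lives for the whole session:
 * it is shared with the backend here via
 * map_buf(CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF), reused by the destructor to
 * send the unlink command, and unmapped and freed again in closeCamera().
 */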
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        // Disable HDR+ mode.
        disableHdrPlusModeLocked();
        // Disconnect Easel if it's connected.
        pthread_mutex_lock(&gCamLock);
        if (gEaselConnected) {
            gHdrPlusClient->disconnect();
            gEaselConnected = false;
        }
        pthread_mutex_unlock(&gCamLock);

        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

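/*
 * Session state machine as seen in this file: the device starts CLOSED,
 * openCamera() moves it to OPENED, initialize() to INITIALIZED, and
 * closeCamera() back to CLOSED; camEvtHandle() forces ERROR if the camera
 * daemon dies. The entry points shown here validate mState before doing any
 * work.
 */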
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configuration matches the sizes
 *              advertised by the HAL
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    //As per spec, depth cloud should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from frameworks is always full active array size,
                     * but it is not clear from the spec if the framework will always
                     * follow that; also we have logic to override to full array
                     * size, so keeping the logic lenient at the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

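/*
 * Worked example for the depth (PDAF) cases above, using made-up numbers: if
 * raw_meta_dim[mPDIndex] were 640x480, a RAW16 depth stream must be exactly
 * 640x480, while a depth BLOB (point cloud) stream must be
 * (640 * 480 * 2) / 16 = 38400 wide and 1 high.
 */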
/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to the same
 *              internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlagsForEis
 *
 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlagsForEis(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);

        // Because EIS is "hard-coded" for certain use cases, and the current
        // implementation doesn't support shared preview and video on the same
        // stream, return failure if EIS is forced on.
        if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
            LOGE("Combined video and preview usage flag is not supported due to EIS");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams */
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}

1397/*==============================================================================
1398 * FUNCTION   : getSensorModeInfo
1399 *
1400 * DESCRIPTION: Get sensor mode information based on the current stream configuration
1401 *
1402 * PARAMETERS :
1403 *   @sensorModeInfo : sensor mode information (output)
1404 *
1405 * RETURN     : int32_t type of status
1406 *              NO_ERROR  -- success
1407 *              non-zero failure code
1408 *
1409 *==========================================================================*/
1410int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1411{
1412 int32_t rc = NO_ERROR;
1413
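    // Take the largest width and height across all configured streams and
    // send it down as CAM_INTF_PARM_MAX_DIMENSION, so the backend can choose
    // a sensor mode that covers every output before it is queried below.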
1414 cam_dimension_t max_dim = {0, 0};
1415 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1416 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1417 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1418 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1419 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1420 }
1421
1422 clear_metadata_buffer(mParameters);
1423
1424 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1425 max_dim);
1426 if (rc != NO_ERROR) {
1427 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1428 return rc;
1429 }
1430
1431 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1432 if (rc != NO_ERROR) {
1433 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1434 return rc;
1435 }
1436
1437 clear_metadata_buffer(mParameters);
1438    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
1439
1440    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1441            mParameters);
1442    if (rc != NO_ERROR) {
1443        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1444        return rc;
1445    }
1446
1447    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
1448    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1449            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1450            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1451            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1452            sensorModeInfo.num_raw_bits);
1453
1454 return rc;
1455}
1456
1457/*==============================================================================
1458 * FUNCTION   : addToPPFeatureMask
1459 *
1460 * DESCRIPTION: add additional features to pp feature mask based on
1461 *              stream type and use case
1462 *
1463 * PARAMETERS :
1464 *   @stream_format : stream type for feature mask
1465 *   @stream_idx : stream idx within postprocess_mask list to change
1466 *
1467 * RETURN     : None
1468 *
1469 *==========================================================================*/
1470void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1471 uint32_t stream_idx)
1472{
1473 char feature_mask_value[PROPERTY_VALUE_MAX];
1474 cam_feature_mask_t feature_mask;
1475 int args_converted;
1476 int property_len;
1477
1478 /* Get feature mask from property */
1479#ifdef _LE_CAMERA_
1480    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1481    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1482    property_len = property_get("persist.camera.hal3.feature",
1483            feature_mask_value, swtnr_feature_mask_value);
1484#else
1485    property_len = property_get("persist.camera.hal3.feature",
1486            feature_mask_value, "0");
1487#endif
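    // The property value may be a hex string with a "0x" prefix or a plain
    // decimal string; anything that does not parse as a single integer clears
    // the feature mask below.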
1488    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1489 (feature_mask_value[1] == 'x')) {
1490 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1491 } else {
1492 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1493 }
1494 if (1 != args_converted) {
1495 feature_mask = 0;
1496 LOGE("Wrong feature mask %s", feature_mask_value);
1497 return;
1498 }
1499
1500 switch (stream_format) {
1501 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1502 /* Add LLVD to pp feature mask only if video hint is enabled */
1503 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1504 mStreamConfigInfo.postprocess_mask[stream_idx]
1505 |= CAM_QTI_FEATURE_SW_TNR;
1506 LOGH("Added SW TNR to pp feature mask");
1507 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1508 mStreamConfigInfo.postprocess_mask[stream_idx]
1509 |= CAM_QCOM_FEATURE_LLVD;
1510 LOGH("Added LLVD SeeMore to pp feature mask");
1511 }
1512        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1513                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1514            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1515        }
1516        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1517                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1518            mStreamConfigInfo.postprocess_mask[stream_idx] |=
1519                    CAM_QTI_FEATURE_BINNING_CORRECTION;
1520        }
1521        break;
1522 }
1523 default:
1524 break;
1525 }
1526 LOGD("PP feature mask %llx",
1527 mStreamConfigInfo.postprocess_mask[stream_idx]);
1528}
1529
1530/*==============================================================================
1531 * FUNCTION : updateFpsInPreviewBuffer
1532 *
1533 * DESCRIPTION: update FPS information in preview buffer.
1534 *
1535 * PARAMETERS :
1536 * @metadata : pointer to metadata buffer
1537 * @frame_number: frame_number to look for in pending buffer list
1538 *
1539 * RETURN : None
1540 *
1541 *==========================================================================*/
1542void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1543 uint32_t frame_number)
1544{
1545 // Mark all pending buffers for this particular request
1546 // with corresponding framerate information
1547 for (List<PendingBuffersInRequest>::iterator req =
1548 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1549 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1550 for(List<PendingBufferInfo>::iterator j =
1551 req->mPendingBufferList.begin();
1552 j != req->mPendingBufferList.end(); j++) {
1553 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1554 if ((req->frame_number == frame_number) &&
1555 (channel->getStreamTypeMask() &
1556 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1557 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1558 CAM_INTF_PARM_FPS_RANGE, metadata) {
1559 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1560 struct private_handle_t *priv_handle =
1561 (struct private_handle_t *)(*(j->buffer));
1562 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1563 }
1564 }
1565 }
1566 }
1567}
1568
1569/*==============================================================================
1570 * FUNCTION : updateTimeStampInPendingBuffers
1571 *
1572 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1573 * of a frame number
1574 *
1575 * PARAMETERS :
1576 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1577 * @timestamp : timestamp to be set
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1583 uint32_t frameNumber, nsecs_t timestamp)
1584{
1585 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1586 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1587 if (req->frame_number != frameNumber)
1588 continue;
1589
1590 for (auto k = req->mPendingBufferList.begin();
1591 k != req->mPendingBufferList.end(); k++ ) {
1592 struct private_handle_t *priv_handle =
1593 (struct private_handle_t *) (*(k->buffer));
1594 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1595 }
1596 }
1597 return;
1598}
1599
1600/*===========================================================================
1601 * FUNCTION : configureStreams
1602 *
1603 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1604 * and output streams.
1605 *
1606 * PARAMETERS :
1607 * @stream_list : streams to be configured
1608 *
1609 * RETURN :
1610 *
1611 *==========================================================================*/
1612int QCamera3HardwareInterface::configureStreams(
1613 camera3_stream_configuration_t *streamList)
1614{
1615    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1616    int rc = 0;
1617
1618    // Acquire perfLock before configure streams
1619    mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1620    rc = configureStreamsPerfLocked(streamList);
1621    mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1622
1623 return rc;
1624}
1625
1626/*===========================================================================
1627 * FUNCTION : configureStreamsPerfLocked
1628 *
1629 * DESCRIPTION: configureStreams while perfLock is held.
1630 *
1631 * PARAMETERS :
1632 * @stream_list : streams to be configured
1633 *
1634 * RETURN : int32_t type of status
1635 * NO_ERROR -- success
1636 *              non-zero failure code
1637 *==========================================================================*/
1638int QCamera3HardwareInterface::configureStreamsPerfLocked(
1639 camera3_stream_configuration_t *streamList)
1640{
1641    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
1642    int rc = 0;
1643
1644 // Sanity check stream_list
1645 if (streamList == NULL) {
1646 LOGE("NULL stream configuration");
1647 return BAD_VALUE;
1648 }
1649 if (streamList->streams == NULL) {
1650 LOGE("NULL stream list");
1651 return BAD_VALUE;
1652 }
1653
1654 if (streamList->num_streams < 1) {
1655 LOGE("Bad number of streams requested: %d",
1656 streamList->num_streams);
1657 return BAD_VALUE;
1658 }
1659
1660 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1661 LOGE("Maximum number of streams %d exceeded: %d",
1662 MAX_NUM_STREAMS, streamList->num_streams);
1663 return BAD_VALUE;
1664 }
1665
1666    rc = validateUsageFlags(streamList);
1667 if (rc != NO_ERROR) {
1668 return rc;
1669 }
1670
1671    mOpMode = streamList->operation_mode;
1672 LOGD("mOpMode: %d", mOpMode);
1673
1674    /* first invalidate all the streams in mStreamInfo
1675 * if they appear again, they will be validated */
1676 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1677 it != mStreamInfo.end(); it++) {
1678 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1679 if (channel) {
1680 channel->stop();
1681 }
1682 (*it)->status = INVALID;
1683 }
1684
1685 if (mRawDumpChannel) {
1686 mRawDumpChannel->stop();
1687 delete mRawDumpChannel;
1688 mRawDumpChannel = NULL;
1689 }
1690
1691    if (mHdrPlusRawSrcChannel) {
1692 mHdrPlusRawSrcChannel->stop();
1693 delete mHdrPlusRawSrcChannel;
1694 mHdrPlusRawSrcChannel = NULL;
1695 }
1696
1697    if (mSupportChannel)
1698 mSupportChannel->stop();
1699
1700 if (mAnalysisChannel) {
1701 mAnalysisChannel->stop();
1702 }
1703 if (mMetadataChannel) {
1704        /* If mStreamInfo is not empty, there is a metadata stream */
1705 mMetadataChannel->stop();
1706 }
1707 if (mChannelHandle) {
1708 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1709 mChannelHandle);
1710 LOGD("stopping channel %d", mChannelHandle);
1711 }
1712
1713 pthread_mutex_lock(&mMutex);
1714
1715 // Check state
1716 switch (mState) {
1717 case INITIALIZED:
1718 case CONFIGURED:
1719 case STARTED:
1720 /* valid state */
1721 break;
1722 default:
1723 LOGE("Invalid state %d", mState);
1724 pthread_mutex_unlock(&mMutex);
1725 return -ENODEV;
1726 }
1727
1728 /* Check whether we have video stream */
1729 m_bIs4KVideo = false;
1730 m_bIsVideo = false;
1731 m_bEisSupportedSize = false;
1732 m_bTnrEnabled = false;
1733    m_bVideoHdrEnabled = false;
1734    bool isZsl = false;
1735    bool depthPresent = false;
1736    bool isPreview = false;
1737    uint32_t videoWidth = 0U;
1738 uint32_t videoHeight = 0U;
1739 size_t rawStreamCnt = 0;
1740 size_t stallStreamCnt = 0;
1741 size_t processedStreamCnt = 0;
1742 // Number of streams on ISP encoder path
1743 size_t numStreamsOnEncoder = 0;
1744 size_t numYuv888OnEncoder = 0;
1745 bool bYuv888OverrideJpeg = false;
1746 cam_dimension_t largeYuv888Size = {0, 0};
1747 cam_dimension_t maxViewfinderSize = {0, 0};
1748 bool bJpegExceeds4K = false;
1749 bool bJpegOnEncoder = false;
1750 bool bUseCommonFeatureMask = false;
1751 cam_feature_mask_t commonFeatureMask = 0;
1752 bool bSmallJpegSize = false;
1753 uint32_t width_ratio;
1754 uint32_t height_ratio;
1755 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1756 camera3_stream_t *inputStream = NULL;
1757 bool isJpeg = false;
1758 cam_dimension_t jpegSize = {0, 0};
1759    cam_dimension_t previewSize = {0, 0};
1760    size_t pdStatCount = 0;
1761
1762 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1763
1764 /*EIS configuration*/
1765    uint8_t eis_prop_set;
1766 uint32_t maxEisWidth = 0;
1767 uint32_t maxEisHeight = 0;
1768
1769    // Initialize all instant AEC related variables
1770 mInstantAEC = false;
1771 mResetInstantAEC = false;
1772 mInstantAECSettledFrameNumber = 0;
1773 mAecSkipDisplayFrameBound = 0;
1774 mInstantAecFrameIdxCount = 0;
1775    mCurrFeatureState = 0;
1776    mStreamConfig = true;
1777
1778    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1779
1780 size_t count = IS_TYPE_MAX;
1781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1782 for (size_t i = 0; i < count; i++) {
1783 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1784                (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1785            m_bEisSupported = true;
1786            break;
1787        }
1788    }
1789
1790    if (m_bEisSupported) {
1791        maxEisWidth = MAX_EIS_WIDTH;
1792 maxEisHeight = MAX_EIS_HEIGHT;
1793 }
1794
1795 /* EIS setprop control */
1796 char eis_prop[PROPERTY_VALUE_MAX];
1797 memset(eis_prop, 0, sizeof(eis_prop));
1798    property_get("persist.camera.eis.enable", eis_prop, "1");
1799    eis_prop_set = (uint8_t)atoi(eis_prop);
1800
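    // EIS is enabled only when the sensor reports an EIS-capable IS type, the
    // property above is non-zero (default "1"), and this is not a constrained
    // high-speed (HFR) session.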
1801    m_bEisEnable = eis_prop_set && m_bEisSupported &&
1802        (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1803
1804    LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1805        m_bEisEnable, eis_prop_set, m_bEisSupported);
1806
1807    /* stream configurations */
1808 for (size_t i = 0; i < streamList->num_streams; i++) {
1809 camera3_stream_t *newStream = streamList->streams[i];
1810 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1811 "height = %d, rotation = %d, usage = 0x%x",
1812 i, newStream->stream_type, newStream->format,
1813 newStream->width, newStream->height, newStream->rotation,
1814 newStream->usage);
1815 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1816 newStream->stream_type == CAMERA3_STREAM_INPUT){
1817 isZsl = true;
1818 }
1819        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1820 IS_USAGE_PREVIEW(newStream->usage)) {
1821 isPreview = true;
1822 }
1823
1824        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1825 inputStream = newStream;
1826 }
1827
1828        if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1829                (newStream->data_space != HAL_DATASPACE_DEPTH)) {
1830            isJpeg = true;
1831 jpegSize.width = newStream->width;
1832 jpegSize.height = newStream->height;
1833 if (newStream->width > VIDEO_4K_WIDTH ||
1834 newStream->height > VIDEO_4K_HEIGHT)
1835 bJpegExceeds4K = true;
1836 }
1837
1838 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1839 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1840 m_bIsVideo = true;
1841            // In HAL3 we can have multiple different video streams.
1842 // The variables video width and height are used below as
1843 // dimensions of the biggest of them
1844 if (videoWidth < newStream->width ||
1845 videoHeight < newStream->height) {
1846 videoWidth = newStream->width;
1847 videoHeight = newStream->height;
1848 }
1849            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1850 (VIDEO_4K_HEIGHT <= newStream->height)) {
1851 m_bIs4KVideo = true;
1852 }
1853 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1854 (newStream->height <= maxEisHeight);
1855
1856        }
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1859 switch (newStream->format) {
1860 case HAL_PIXEL_FORMAT_BLOB:
1861                if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1862 depthPresent = true;
1863 break;
1864 }
1865                stallStreamCnt++;
1866 if (isOnEncoder(maxViewfinderSize, newStream->width,
1867 newStream->height)) {
1868 numStreamsOnEncoder++;
1869 bJpegOnEncoder = true;
1870 }
1871 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1872 newStream->width);
1873 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1874                        newStream->height);
1875 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1876 "FATAL: max_downscale_factor cannot be zero and so assert");
1877 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1878 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1879 LOGH("Setting small jpeg size flag to true");
1880 bSmallJpegSize = true;
1881 }
1882 break;
1883 case HAL_PIXEL_FORMAT_RAW10:
1884 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1885 case HAL_PIXEL_FORMAT_RAW16:
1886 rawStreamCnt++;
1887                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1888 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1889 pdStatCount++;
1890 }
1891                break;
1892 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1893 processedStreamCnt++;
1894 if (isOnEncoder(maxViewfinderSize, newStream->width,
1895 newStream->height)) {
1896 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1897 !IS_USAGE_ZSL(newStream->usage)) {
1898 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900 numStreamsOnEncoder++;
1901 }
1902 break;
1903 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1904 processedStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 // If Yuv888 size is not greater than 4K, set feature mask
1908 // to SUPERSET so that it support concurrent request on
1909 // YUV and JPEG.
1910 if (newStream->width <= VIDEO_4K_WIDTH &&
1911 newStream->height <= VIDEO_4K_HEIGHT) {
1912 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 }
1914 numStreamsOnEncoder++;
1915 numYuv888OnEncoder++;
1916 largeYuv888Size.width = newStream->width;
1917 largeYuv888Size.height = newStream->height;
1918 }
1919 break;
1920 default:
1921 processedStreamCnt++;
1922 if (isOnEncoder(maxViewfinderSize, newStream->width,
1923 newStream->height)) {
1924 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1925 numStreamsOnEncoder++;
1926 }
1927 break;
1928 }
1929
1930 }
1931 }
1932
1933 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1934 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1935 !m_bIsVideo) {
1936 m_bEisEnable = false;
1937 }
1938
1939    if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1940 pthread_mutex_unlock(&mMutex);
1941 return -EINVAL;
1942 }
1943
1944    uint8_t forceEnableTnr = 0;
1945 char tnr_prop[PROPERTY_VALUE_MAX];
1946 memset(tnr_prop, 0, sizeof(tnr_prop));
1947 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1948 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1949
1950    /* Logic to enable/disable TNR based on specific config size/etc.*/
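    // TNR is limited to 1080p/720p video at normal frame rates; the
    // debug.camera.tnr.forceenable property read above overrides this gating.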
1951 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1952 ((videoWidth == 1920 && videoHeight == 1080) ||
1953 (videoWidth == 1280 && videoHeight == 720)) &&
1954 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1955 m_bTnrEnabled = true;
1956    else if (forceEnableTnr)
1957 m_bTnrEnabled = true;
1958
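    // Video HDR can be requested via the persist.camera.hdr.video property;
    // it only takes effect when a video stream is configured and the session
    // is not HFR.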
1959    char videoHdrProp[PROPERTY_VALUE_MAX];
1960 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1961 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1962 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1963
1964 if (hdr_mode_prop == 1 && m_bIsVideo &&
1965 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1966 m_bVideoHdrEnabled = true;
1967 else
1968 m_bVideoHdrEnabled = false;
1969
1970
1971    /* Check if num_streams is sane */
1972 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1973 rawStreamCnt > MAX_RAW_STREAMS ||
1974 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1975        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
1976 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1977 pthread_mutex_unlock(&mMutex);
1978 return -EINVAL;
1979 }
1980 /* Check whether we have zsl stream or 4k video case */
1981    if (isZsl && m_bIs4KVideo) {
1982        LOGE("Currently invalid configuration ZSL & 4K Video!");
1983        pthread_mutex_unlock(&mMutex);
1984 return -EINVAL;
1985 }
1986 /* Check if stream sizes are sane */
1987 if (numStreamsOnEncoder > 2) {
1988 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1989 pthread_mutex_unlock(&mMutex);
1990 return -EINVAL;
1991 } else if (1 < numStreamsOnEncoder){
1992 bUseCommonFeatureMask = true;
1993 LOGH("Multiple streams above max viewfinder size, common mask needed");
1994 }
1995
1996 /* Check if BLOB size is greater than 4k in 4k recording case */
1997 if (m_bIs4KVideo && bJpegExceeds4K) {
1998 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
2003    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2004 depthPresent) {
2005 LOGE("HAL doesn't support depth streams in HFR mode!");
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009
2010    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2011 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2012 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2013 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2014 // configurations:
2015 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2016 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2017 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2018 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2019 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2020 __func__);
2021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024
2025 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2026 // the YUV stream's size is greater or equal to the JPEG size, set common
2027 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2028 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2029 jpegSize.width, jpegSize.height) &&
2030 largeYuv888Size.width > jpegSize.width &&
2031 largeYuv888Size.height > jpegSize.height) {
2032 bYuv888OverrideJpeg = true;
2033 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2034 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
2036
2037 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2038 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2039 commonFeatureMask);
2040 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2041 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2042
2043 rc = validateStreamDimensions(streamList);
2044 if (rc == NO_ERROR) {
2045 rc = validateStreamRotations(streamList);
2046 }
2047 if (rc != NO_ERROR) {
2048 LOGE("Invalid stream configuration requested!");
2049 pthread_mutex_unlock(&mMutex);
2050 return rc;
2051 }
2052
2053    if (1 < pdStatCount) {
2054 LOGE("HAL doesn't support multiple PD streams");
2055 pthread_mutex_unlock(&mMutex);
2056 return -EINVAL;
2057 }
2058
2059 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2060 (1 == pdStatCount)) {
2061 LOGE("HAL doesn't support PD streams in HFR mode!");
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
2066    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2067 for (size_t i = 0; i < streamList->num_streams; i++) {
2068 camera3_stream_t *newStream = streamList->streams[i];
2069 LOGH("newStream type = %d, stream format = %d "
2070 "stream size : %d x %d, stream rotation = %d",
2071 newStream->stream_type, newStream->format,
2072 newStream->width, newStream->height, newStream->rotation);
2073 //if the stream is in the mStreamList validate it
2074 bool stream_exists = false;
2075 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2076 it != mStreamInfo.end(); it++) {
2077 if ((*it)->stream == newStream) {
2078 QCamera3ProcessingChannel *channel =
2079 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2080 stream_exists = true;
2081 if (channel)
2082 delete channel;
2083 (*it)->status = VALID;
2084 (*it)->stream->priv = NULL;
2085 (*it)->channel = NULL;
2086 }
2087 }
2088 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2089 //new stream
2090 stream_info_t* stream_info;
2091 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2092 if (!stream_info) {
2093 LOGE("Could not allocate stream info");
2094 rc = -ENOMEM;
2095 pthread_mutex_unlock(&mMutex);
2096 return rc;
2097 }
2098 stream_info->stream = newStream;
2099 stream_info->status = VALID;
2100 stream_info->channel = NULL;
2101 mStreamInfo.push_back(stream_info);
2102 }
2103 /* Covers Opaque ZSL and API1 F/W ZSL */
2104 if (IS_USAGE_ZSL(newStream->usage)
2105 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2106 if (zslStream != NULL) {
2107 LOGE("Multiple input/reprocess streams requested!");
2108 pthread_mutex_unlock(&mMutex);
2109 return BAD_VALUE;
2110 }
2111 zslStream = newStream;
2112 }
2113 /* Covers YUV reprocess */
2114 if (inputStream != NULL) {
2115 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2116 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2117 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2118 && inputStream->width == newStream->width
2119 && inputStream->height == newStream->height) {
2120 if (zslStream != NULL) {
2121 /* This scenario indicates multiple YUV streams with same size
2122 * as input stream have been requested, since zsl stream handle
2123             * is solely used for the purpose of overriding the size of streams
2124 * which share h/w streams we will just make a guess here as to
2125 * which of the stream is a ZSL stream, this will be refactored
2126 * once we make generic logic for streams sharing encoder output
2127 */
2128 LOGH("Warning, Multiple ip/reprocess streams requested!");
2129 }
2130 zslStream = newStream;
2131 }
2132 }
2133 }
2134
2135 /* If a zsl stream is set, we know that we have configured at least one input or
2136 bidirectional stream */
2137 if (NULL != zslStream) {
2138 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2139 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2140 mInputStreamInfo.format = zslStream->format;
2141 mInputStreamInfo.usage = zslStream->usage;
2142 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2143 mInputStreamInfo.dim.width,
2144 mInputStreamInfo.dim.height,
2145 mInputStreamInfo.format, mInputStreamInfo.usage);
2146 }
2147
2148 cleanAndSortStreamInfo();
2149 if (mMetadataChannel) {
2150 delete mMetadataChannel;
2151 mMetadataChannel = NULL;
2152 }
2153 if (mSupportChannel) {
2154 delete mSupportChannel;
2155 mSupportChannel = NULL;
2156 }
2157
2158 if (mAnalysisChannel) {
2159 delete mAnalysisChannel;
2160 mAnalysisChannel = NULL;
2161 }
2162
2163 if (mDummyBatchChannel) {
2164 delete mDummyBatchChannel;
2165 mDummyBatchChannel = NULL;
2166 }
2167
2168    if (mDepthChannel) {
2169 mDepthChannel = NULL;
2170 }
2171
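    // persist.camera.is_type selects the image stabilization variant; the
    // EIS 3.0 specific handling below (extra video buffers, PPEISCORE) is
    // keyed off m_bEis3PropertyEnabled.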
2172    char is_type_value[PROPERTY_VALUE_MAX];
2173 property_get("persist.camera.is_type", is_type_value, "4");
2174 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2175
2176    //Create metadata channel and initialize it
2177 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2178 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2179 gCamCapability[mCameraId]->color_arrangement);
2180 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2181 mChannelHandle, mCameraHandle->ops, captureResultCb,
2182            setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2183    if (mMetadataChannel == NULL) {
2184 LOGE("failed to allocate metadata channel");
2185 rc = -ENOMEM;
2186 pthread_mutex_unlock(&mMutex);
2187 return rc;
2188 }
2189 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2190 if (rc < 0) {
2191 LOGE("metadata channel initialization failed");
2192 delete mMetadataChannel;
2193 mMetadataChannel = NULL;
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
2198    cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2199    bool isRawStreamRequested = false;
2200    bool onlyRaw = true;
2201    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2202 /* Allocate channel objects for the requested streams */
2203 for (size_t i = 0; i < streamList->num_streams; i++) {
2204 camera3_stream_t *newStream = streamList->streams[i];
2205 uint32_t stream_usage = newStream->usage;
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2207 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2208 struct camera_info *p_info = NULL;
2209 pthread_mutex_lock(&gCamLock);
2210 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2211 pthread_mutex_unlock(&gCamLock);
2212 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2213 || IS_USAGE_ZSL(newStream->usage)) &&
2214 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2215            onlyRaw = false; // There is non-raw stream - bypass flag if set
2216            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2217            if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2218                if (bUseCommonFeatureMask)
2219                    zsl_ppmask = commonFeatureMask;
2220                else
2221                    zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2222            } else {
2223                if (numStreamsOnEncoder > 0)
2224                    zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225                else
2226                    zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2227            }
2228            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2229        } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2230            onlyRaw = false; // There is non-raw stream - bypass flag if set
2231            LOGH("Input stream configured, reprocess config");
2232 } else {
2233 //for non zsl streams find out the format
2234 switch (newStream->format) {
2235 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2236 {
2237                onlyRaw = false; // There is non-raw stream - bypass flag if set
2238                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2239 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2240 /* add additional features to pp feature mask */
2241 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2242 mStreamConfigInfo.num_streams);
2243
2244 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2246 CAM_STREAM_TYPE_VIDEO;
2247 if (m_bTnrEnabled && m_bTnrVideo) {
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2249 CAM_QCOM_FEATURE_CPP_TNR;
2250 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2251 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2252 ~CAM_QCOM_FEATURE_CDS;
2253 }
2254                    if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2255 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2256 CAM_QTI_FEATURE_PPEISCORE;
2257 }
2258                } else {
2259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2260 CAM_STREAM_TYPE_PREVIEW;
2261 if (m_bTnrEnabled && m_bTnrPreview) {
2262 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2263 CAM_QCOM_FEATURE_CPP_TNR;
2264 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2265 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2266 ~CAM_QCOM_FEATURE_CDS;
2267 }
2268                    if (!m_bSwTnrPreview) {
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2270 ~CAM_QTI_FEATURE_SW_TNR;
2271 }
2272                    padding_info.width_padding = mSurfaceStridePadding;
2273                    padding_info.height_padding = CAM_PAD_TO_2;
2274                    previewSize.width = (int32_t)newStream->width;
2275                    previewSize.height = (int32_t)newStream->height;
2276                }
2277 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2278 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2279 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2280 newStream->height;
2281 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2282 newStream->width;
2283 }
2284 }
2285 break;
2286 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2287                onlyRaw = false; // There is non-raw stream - bypass flag if set
2288                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2289 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2290 if (bUseCommonFeatureMask)
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2292 commonFeatureMask;
2293 else
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2295 CAM_QCOM_FEATURE_NONE;
2296 } else {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2298 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2299 }
2300 break;
2301 case HAL_PIXEL_FORMAT_BLOB:
2302                onlyRaw = false; // There is non-raw stream - bypass flag if set
2303                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2304 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2305 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2307 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2308                    /* Remove rotation if it is not supported
2309 for 4K LiveVideo snapshot case (online processing) */
2310 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2311 CAM_QCOM_FEATURE_ROTATION)) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2313 &= ~CAM_QCOM_FEATURE_ROTATION;
2314 }
2315                } else {
2316 if (bUseCommonFeatureMask &&
2317 isOnEncoder(maxViewfinderSize, newStream->width,
2318 newStream->height)) {
2319 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2320 } else {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2322 }
2323 }
2324 if (isZsl) {
2325 if (zslStream) {
2326 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2327 (int32_t)zslStream->width;
2328 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2329 (int32_t)zslStream->height;
2330                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331 zsl_ppmask;
2332                    } else {
2333 LOGE("Error, No ZSL stream identified");
2334 pthread_mutex_unlock(&mMutex);
2335 return -EINVAL;
2336 }
2337 } else if (m_bIs4KVideo) {
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2339 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2340 } else if (bYuv888OverrideJpeg) {
2341 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2342 (int32_t)largeYuv888Size.width;
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2344 (int32_t)largeYuv888Size.height;
2345 }
2346 break;
2347 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2348 case HAL_PIXEL_FORMAT_RAW16:
2349 case HAL_PIXEL_FORMAT_RAW10:
2350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2351 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2352 isRawStreamRequested = true;
2353                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2354 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2355 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2356 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2357 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2358 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2359 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2360 gCamCapability[mCameraId]->dt[mPDIndex];
2361 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2362 gCamCapability[mCameraId]->vc[mPDIndex];
2363 }
2364                break;
2365            default:
2366                onlyRaw = false; // There is non-raw stream - bypass flag if set
2367                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2369 break;
2370 }
2371 }
2372
2373 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2374 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2375 gCamCapability[mCameraId]->color_arrangement);
2376
2377 if (newStream->priv == NULL) {
2378 //New stream, construct channel
2379 switch (newStream->stream_type) {
2380 case CAMERA3_STREAM_INPUT:
2381 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2382 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2383 break;
2384 case CAMERA3_STREAM_BIDIRECTIONAL:
2385 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2386 GRALLOC_USAGE_HW_CAMERA_WRITE;
2387 break;
2388 case CAMERA3_STREAM_OUTPUT:
2389 /* For video encoding stream, set read/write rarely
2390 * flag so that they may be set to un-cached */
2391 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2392 newStream->usage |=
2393 (GRALLOC_USAGE_SW_READ_RARELY |
2394 GRALLOC_USAGE_SW_WRITE_RARELY |
2395 GRALLOC_USAGE_HW_CAMERA_WRITE);
2396 else if (IS_USAGE_ZSL(newStream->usage))
2397 {
2398 LOGD("ZSL usage flag skipping");
2399 }
2400 else if (newStream == zslStream
2401 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2402 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2403 } else
2404 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2405 break;
2406 default:
2407 LOGE("Invalid stream_type %d", newStream->stream_type);
2408 break;
2409 }
2410
2411            bool forcePreviewUBWC = true;
2412            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2413 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2414 QCamera3ProcessingChannel *channel = NULL;
2415 switch (newStream->format) {
2416 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2417 if ((newStream->usage &
2418 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2419 (streamList->operation_mode ==
2420 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2421 ) {
2422 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2423 mChannelHandle, mCameraHandle->ops, captureResultCb,
2424                            setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2425                            this,
2426 newStream,
2427 (cam_stream_type_t)
2428 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2430 mMetadataChannel,
2431 0); //heap buffers are not required for HFR video channel
2432 if (channel == NULL) {
2433 LOGE("allocation of channel failed");
2434 pthread_mutex_unlock(&mMutex);
2435 return -ENOMEM;
2436 }
2437 //channel->getNumBuffers() will return 0 here so use
2438                    //MAX_INFLIGHT_HFR_REQUESTS
2439 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2440 newStream->priv = channel;
2441 LOGI("num video buffers in HFR mode: %d",
2442 MAX_INFLIGHT_HFR_REQUESTS);
2443 } else {
2444 /* Copy stream contents in HFR preview only case to create
2445 * dummy batch channel so that sensor streaming is in
2446 * HFR mode */
2447 if (!m_bIsVideo && (streamList->operation_mode ==
2448 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2449 mDummyBatchStream = *newStream;
2450 }
2451                    int bufferCount = MAX_INFLIGHT_REQUESTS;
2452 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2453 CAM_STREAM_TYPE_VIDEO) {
2454 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2455 bufferCount = MAX_VIDEO_BUFFERS;
2456 }
2457                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2458                            mChannelHandle, mCameraHandle->ops, captureResultCb,
2459                            setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2460                            this,
2461 newStream,
2462 (cam_stream_type_t)
2463 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2465 mMetadataChannel,
2466                            bufferCount);
2467                    if (channel == NULL) {
2468 LOGE("allocation of channel failed");
2469 pthread_mutex_unlock(&mMutex);
2470 return -ENOMEM;
2471 }
2472                    /* disable UBWC for preview, though supported,
2473                     * to take advantage of CPP duplication */
2474                    if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2475                            (previewSize.width == (int32_t)videoWidth) &&
2476                            (previewSize.height == (int32_t)videoHeight)) {
2477                        forcePreviewUBWC = false;
2478                    }
2479                    channel->setUBWCEnabled(forcePreviewUBWC);
2480                    newStream->max_buffers = channel->getNumBuffers();
2481 newStream->priv = channel;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2485 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2486 mChannelHandle,
2487 mCameraHandle->ops, captureResultCb,
2488                            setBufferErrorStatus, &padding_info,
2489                            this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel);
2495 if (channel == NULL) {
2496 LOGE("allocation of YUV channel failed");
2497 pthread_mutex_unlock(&mMutex);
2498 return -ENOMEM;
2499 }
2500 newStream->max_buffers = channel->getNumBuffers();
2501 newStream->priv = channel;
2502 break;
2503 }
2504 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2505 case HAL_PIXEL_FORMAT_RAW16:
2506            case HAL_PIXEL_FORMAT_RAW10: {
2507                bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2508                        (HAL_DATASPACE_DEPTH != newStream->data_space))
2509                        ? true : false;
2510                mRawChannel = new QCamera3RawChannel(
2511                        mCameraHandle->camera_handle, mChannelHandle,
2512                        mCameraHandle->ops, captureResultCb,
2513                        setBufferErrorStatus, &padding_info,
2514                        this, newStream,
2515                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2516                        mMetadataChannel, isRAW16);
2517                if (mRawChannel == NULL) {
2518 LOGE("allocation of raw channel failed");
2519 pthread_mutex_unlock(&mMutex);
2520 return -ENOMEM;
2521 }
2522 newStream->max_buffers = mRawChannel->getNumBuffers();
2523 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2524 break;
2525            }
2526            case HAL_PIXEL_FORMAT_BLOB:
2527                if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2528 mDepthChannel = new QCamera3DepthChannel(
2529 mCameraHandle->camera_handle, mChannelHandle,
2530 mCameraHandle->ops, NULL, NULL, &padding_info,
2531 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2532 mMetadataChannel);
2533 if (NULL == mDepthChannel) {
2534 LOGE("Allocation of depth channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return NO_MEMORY;
2537 }
2538 newStream->priv = mDepthChannel;
2539 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2540 } else {
2541 // Max live snapshot inflight buffer is 1. This is to mitigate
2542 // frame drop issues for video snapshot. The more buffers being
2543 // allocated, the more frame drops there are.
2544 mPictureChannel = new QCamera3PicChannel(
2545 mCameraHandle->camera_handle, mChannelHandle,
2546 mCameraHandle->ops, captureResultCb,
2547 setBufferErrorStatus, &padding_info, this, newStream,
2548 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2549 m_bIs4KVideo, isZsl, mMetadataChannel,
2550 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2551 if (mPictureChannel == NULL) {
2552 LOGE("allocation of channel failed");
2553 pthread_mutex_unlock(&mMutex);
2554 return -ENOMEM;
2555 }
2556 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2557 newStream->max_buffers = mPictureChannel->getNumBuffers();
2558 mPictureChannel->overrideYuvSize(
2559 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2560 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2561                }
2562                break;
2563
2564 default:
2565 LOGE("not a supported format 0x%x", newStream->format);
2566 break;
2567 }
2568 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2569 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2570 } else {
2571 LOGE("Error, Unknown stream type");
2572 pthread_mutex_unlock(&mMutex);
2573 return -EINVAL;
2574 }
2575
2576 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2577            if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2578                cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2579                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2580                    newStream->width, newStream->height, forcePreviewUBWC);
2581                if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2582 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2583 }
2584 }
2585
2586 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2587 it != mStreamInfo.end(); it++) {
2588 if ((*it)->stream == newStream) {
2589 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2590 break;
2591 }
2592 }
2593 } else {
2594 // Channel already exists for this stream
2595 // Do nothing for now
2596 }
2597 padding_info = gCamCapability[mCameraId]->padding_info;
2598
2599            /* Do not add entries for input and depth streams in the meta stream info
2600             * since there is no real stream associated with them
2601             */
2602            if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2603                    !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2604                      (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2605                mStreamConfigInfo.num_streams++;
2606            }
2607    }
2608
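    // onlyRaw is honored only in the vendor RAW-only operation mode; in every
    // other mode it is cleared so the analysis/support channels below are
    // still considered.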
2609    if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2610 onlyRaw = false;
2611 }
2612
2613    // Create analysis stream all the time, even when h/w support is not available
2614    if (!onlyRaw) {
2615        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2616        cam_analysis_info_t analysisInfo;
2617 int32_t ret = NO_ERROR;
2618 ret = mCommon.getAnalysisInfo(
2619 FALSE,
2620 analysisFeatureMask,
2621 &analysisInfo);
2622 if (ret == NO_ERROR) {
2623            cam_color_filter_arrangement_t analysis_color_arrangement =
2624 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2625 CAM_FILTER_ARRANGEMENT_Y :
2626 gCamCapability[mCameraId]->color_arrangement);
2627 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2628 analysis_color_arrangement);
2629            cam_dimension_t analysisDim;
2630 analysisDim = mCommon.getMatchingDimension(previewSize,
2631 analysisInfo.analysis_recommended_res);
2632
2633 mAnalysisChannel = new QCamera3SupportChannel(
2634 mCameraHandle->camera_handle,
2635 mChannelHandle,
2636 mCameraHandle->ops,
2637 &analysisInfo.analysis_padding_info,
2638 analysisFeatureMask,
2639 CAM_STREAM_TYPE_ANALYSIS,
2640 &analysisDim,
2641 (analysisInfo.analysis_format
2642 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2643 : CAM_FORMAT_YUV_420_NV21),
2644 analysisInfo.hw_analysis_supported,
2645 gCamCapability[mCameraId]->color_arrangement,
2646 this,
2647 0); // force buffer count to 0
2648 } else {
2649 LOGW("getAnalysisInfo failed, ret = %d", ret);
2650 }
2651 if (!mAnalysisChannel) {
2652 LOGW("Analysis channel cannot be created");
2653 }
2654 }
2655
2656    //RAW DUMP channel
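    // Created only when raw dumping is enabled (mEnableRawDump) and the app
    // did not itself request a RAW stream, so the dump does not conflict with
    // an application-owned RAW output.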
2657 if (mEnableRawDump && isRawStreamRequested == false){
2658 cam_dimension_t rawDumpSize;
2659 rawDumpSize = getMaxRawSize(mCameraId);
2660 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2661 setPAAFSupport(rawDumpFeatureMask,
2662 CAM_STREAM_TYPE_RAW,
2663 gCamCapability[mCameraId]->color_arrangement);
2664 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2665 mChannelHandle,
2666 mCameraHandle->ops,
2667 rawDumpSize,
2668 &padding_info,
2669 this, rawDumpFeatureMask);
2670 if (!mRawDumpChannel) {
2671 LOGE("Raw Dump channel cannot be created");
2672 pthread_mutex_unlock(&mMutex);
2673 return -ENOMEM;
2674 }
2675 }
2676
2677    // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
2678    if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
2679        if (isRawStreamRequested || mRawDumpChannel) {
2680            ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2681                    "HDR+ RAW source channel is not created.",
2682                    __FUNCTION__);
2683        } else {
2684 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2685 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2686 setPAAFSupport(hdrPlusRawFeatureMask,
2687 CAM_STREAM_TYPE_RAW,
2688 gCamCapability[mCameraId]->color_arrangement);
2689 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2690 mChannelHandle,
2691 mCameraHandle->ops,
2692 rawSize,
2693 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002694 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002695 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002696 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002697 if (!mHdrPlusRawSrcChannel) {
2698 LOGE("HDR+ Raw Source channel cannot be created");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 }
2703 }
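    // The HDR+ RAW source channel above mirrors the sensor's maximum RAW size and feeds
    // RAW frames from the AP to the HDR+ client. As checked above, it is mutually exclusive
    // with a framework-requested RAW stream or the RAW dump channel.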
2704
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 if (mAnalysisChannel) {
2706 cam_analysis_info_t analysisInfo;
2707 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2708 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2709 CAM_STREAM_TYPE_ANALYSIS;
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2711 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002712 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2714 &analysisInfo);
2715 if (rc != NO_ERROR) {
2716 LOGE("getAnalysisInfo failed, ret = %d", rc);
2717 pthread_mutex_unlock(&mMutex);
2718 return rc;
2719 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002720 cam_color_filter_arrangement_t analysis_color_arrangement =
2721 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2722 CAM_FILTER_ARRANGEMENT_Y :
2723 gCamCapability[mCameraId]->color_arrangement);
2724 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2725 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2726 analysis_color_arrangement);
2727
Thierry Strudel3d639192016-09-09 11:52:26 -07002728 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002729 mCommon.getMatchingDimension(previewSize,
2730 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 mStreamConfigInfo.num_streams++;
2732 }
2733
Thierry Strudel2896d122017-02-23 19:18:03 -08002734 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 cam_analysis_info_t supportInfo;
2736 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2737 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2738 setPAAFSupport(callbackFeatureMask,
2739 CAM_STREAM_TYPE_CALLBACK,
2740 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002741 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002742 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002743 if (ret != NO_ERROR) {
2744 /* Ignore the error for Mono camera
2745 * because the PAAF bit mask is only set
2746 * for CAM_STREAM_TYPE_ANALYSIS stream type
2747 */
2748 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2749 LOGW("getAnalysisInfo failed, ret = %d", ret);
2750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 }
2752 mSupportChannel = new QCamera3SupportChannel(
2753 mCameraHandle->camera_handle,
2754 mChannelHandle,
2755 mCameraHandle->ops,
2756 &gCamCapability[mCameraId]->padding_info,
2757 callbackFeatureMask,
2758 CAM_STREAM_TYPE_CALLBACK,
2759 &QCamera3SupportChannel::kDim,
2760 CAM_FORMAT_YUV_420_NV21,
2761 supportInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002763 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 if (!mSupportChannel) {
2765 LOGE("dummy channel cannot be created");
2766 pthread_mutex_unlock(&mMutex);
2767 return -ENOMEM;
2768 }
2769 }
2770
2771 if (mSupportChannel) {
2772 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2773 QCamera3SupportChannel::kDim;
2774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2775 CAM_STREAM_TYPE_CALLBACK;
2776 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2777 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2778 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2779 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mStreamConfigInfo.num_streams++;
2782 }
2783
2784 if (mRawDumpChannel) {
2785 cam_dimension_t rawSize;
2786 rawSize = getMaxRawSize(mCameraId);
2787 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2788 rawSize;
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2790 CAM_STREAM_TYPE_RAW;
2791 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2792 CAM_QCOM_FEATURE_NONE;
2793 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2794 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2795 gCamCapability[mCameraId]->color_arrangement);
2796 mStreamConfigInfo.num_streams++;
2797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002798
2799 if (mHdrPlusRawSrcChannel) {
2800 cam_dimension_t rawSize;
2801 rawSize = getMaxRawSize(mCameraId);
2802 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2803 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2804 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2805 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2806 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2807 gCamCapability[mCameraId]->color_arrangement);
2808 mStreamConfigInfo.num_streams++;
2809 }
2810
Thierry Strudel3d639192016-09-09 11:52:26 -07002811    /* In HFR mode, if no video stream is added, create a dummy channel so that
2812     * the ISP can still run in batch mode for the preview-only case. This channel is
2813     * never 'start'ed (no stream-on); it is only 'initialized'. */
2814 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2815 !m_bIsVideo) {
2816 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2817 setPAAFSupport(dummyFeatureMask,
2818 CAM_STREAM_TYPE_VIDEO,
2819 gCamCapability[mCameraId]->color_arrangement);
2820 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2821 mChannelHandle,
2822 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002823 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 this,
2825 &mDummyBatchStream,
2826 CAM_STREAM_TYPE_VIDEO,
2827 dummyFeatureMask,
2828 mMetadataChannel);
2829 if (NULL == mDummyBatchChannel) {
2830            LOGE("creation of mDummyBatchChannel failed. "
2831                    "Preview will use non-HFR sensor mode");
2832 }
2833 }
2834 if (mDummyBatchChannel) {
2835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2836 mDummyBatchStream.width;
2837 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2838 mDummyBatchStream.height;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2840 CAM_STREAM_TYPE_VIDEO;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2842 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2843 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2844 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2845 gCamCapability[mCameraId]->color_arrangement);
2846 mStreamConfigInfo.num_streams++;
2847 }
2848
2849 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2850 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002851 m_bIs4KVideo ? 0 :
2852 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
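    // Note: in the nested ternary above the 4K-video case takes precedence over the EIS 3.0
    // case, so max_buffers is set to 0 for 4K video and is otherwise sized either for EIS
    // video buffering or for the normal in-flight request depth.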
Thierry Strudel3d639192016-09-09 11:52:26 -07002853
2854 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2855 for (pendingRequestIterator i = mPendingRequestsList.begin();
2856 i != mPendingRequestsList.end();) {
2857 i = erasePendingRequest(i);
2858 }
2859 mPendingFrameDropList.clear();
2860 // Initialize/Reset the pending buffers list
2861 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2862 req.mPendingBufferList.clear();
2863 }
2864 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 mCurJpegMeta.clear();
2867 //Get min frame duration for this streams configuration
2868 deriveMinFrameDuration();
2869
Chien-Yu Chenee335912017-02-09 17:53:20 -08002870 mFirstPreviewIntentSeen = false;
2871
2872    // Disable HDR+ if it is currently enabled.
2873 disableHdrPlusModeLocked();
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 // Update state
2876 mState = CONFIGURED;
2877
2878 pthread_mutex_unlock(&mMutex);
2879
2880 return rc;
2881}
2882
2883/*===========================================================================
2884 * FUNCTION : validateCaptureRequest
2885 *
2886 * DESCRIPTION: validate a capture request from camera service
2887 *
2888 * PARAMETERS :
2889 * @request : request from framework to process
2890 *
2891 * RETURN :
2892 *
2893 *==========================================================================*/
2894int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002895 camera3_capture_request_t *request,
2896 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002897{
2898 ssize_t idx = 0;
2899 const camera3_stream_buffer_t *b;
2900 CameraMetadata meta;
2901
2902 /* Sanity check the request */
2903 if (request == NULL) {
2904 LOGE("NULL capture request");
2905 return BAD_VALUE;
2906 }
2907
2908 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2909 /*settings cannot be null for the first request*/
2910 return BAD_VALUE;
2911 }
2912
2913 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002914 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2915 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002916        LOGE("Request %d: No output buffers provided!",
2917                frameNumber);
2918 return BAD_VALUE;
2919 }
2920 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2921        LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
2922                request->num_output_buffers, MAX_NUM_STREAMS);
2923 return BAD_VALUE;
2924 }
2925 if (request->input_buffer != NULL) {
2926 b = request->input_buffer;
2927 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2928 LOGE("Request %d: Buffer %ld: Status not OK!",
2929 frameNumber, (long)idx);
2930 return BAD_VALUE;
2931 }
2932 if (b->release_fence != -1) {
2933 LOGE("Request %d: Buffer %ld: Has a release fence!",
2934 frameNumber, (long)idx);
2935 return BAD_VALUE;
2936 }
2937 if (b->buffer == NULL) {
2938 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2939 frameNumber, (long)idx);
2940 return BAD_VALUE;
2941 }
2942 }
2943
2944 // Validate all buffers
2945 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002946 if (b == NULL) {
2947 return BAD_VALUE;
2948 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002949 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002950 QCamera3ProcessingChannel *channel =
2951 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2952 if (channel == NULL) {
2953 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2954 frameNumber, (long)idx);
2955 return BAD_VALUE;
2956 }
2957 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2958 LOGE("Request %d: Buffer %ld: Status not OK!",
2959 frameNumber, (long)idx);
2960 return BAD_VALUE;
2961 }
2962 if (b->release_fence != -1) {
2963 LOGE("Request %d: Buffer %ld: Has a release fence!",
2964 frameNumber, (long)idx);
2965 return BAD_VALUE;
2966 }
2967 if (b->buffer == NULL) {
2968 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2969 frameNumber, (long)idx);
2970 return BAD_VALUE;
2971 }
2972 if (*(b->buffer) == NULL) {
2973 LOGE("Request %d: Buffer %ld: NULL private handle!",
2974 frameNumber, (long)idx);
2975 return BAD_VALUE;
2976 }
2977 idx++;
2978 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002980 return NO_ERROR;
2981}
2982
2983/*===========================================================================
2984 * FUNCTION : deriveMinFrameDuration
2985 *
2986 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2987 * on currently configured streams.
2988 *
2989 * PARAMETERS : NONE
2990 *
2991 * RETURN : NONE
2992 *
2993 *==========================================================================*/
2994void QCamera3HardwareInterface::deriveMinFrameDuration()
2995{
2996 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2997
2998 maxJpegDim = 0;
2999 maxProcessedDim = 0;
3000 maxRawDim = 0;
3001
3002 // Figure out maximum jpeg, processed, and raw dimensions
3003 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3004 it != mStreamInfo.end(); it++) {
3005
3006 // Input stream doesn't have valid stream_type
3007 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3008 continue;
3009
3010 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3011 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3012 if (dimension > maxJpegDim)
3013 maxJpegDim = dimension;
3014 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3015 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3016 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3017 if (dimension > maxRawDim)
3018 maxRawDim = dimension;
3019 } else {
3020 if (dimension > maxProcessedDim)
3021 maxProcessedDim = dimension;
3022 }
3023 }
3024
3025 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3026 MAX_SIZES_CNT);
3027
3028 //Assume all jpeg dimensions are in processed dimensions.
3029 if (maxJpegDim > maxProcessedDim)
3030 maxProcessedDim = maxJpegDim;
3031    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3032 if (maxProcessedDim > maxRawDim) {
3033 maxRawDim = INT32_MAX;
3034
3035 for (size_t i = 0; i < count; i++) {
3036 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3037 gCamCapability[mCameraId]->raw_dim[i].height;
3038 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3039 maxRawDim = dimension;
3040 }
3041 }
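    // Example with hypothetical sizes: if the largest processed/JPEG stream is 12MP and the
    // sensor advertises 13MP and 16MP RAW sizes, the loop above picks the 13MP entry, i.e.
    // the smallest RAW dimension that still covers the largest processed dimension.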
3042
3043 //Find minimum durations for processed, jpeg, and raw
3044 for (size_t i = 0; i < count; i++) {
3045 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3046 gCamCapability[mCameraId]->raw_dim[i].height) {
3047 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3048 break;
3049 }
3050 }
3051 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3052 for (size_t i = 0; i < count; i++) {
3053 if (maxProcessedDim ==
3054 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3055 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3056 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3057 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3058 break;
3059 }
3060 }
3061}
3062
3063/*===========================================================================
3064 * FUNCTION : getMinFrameDuration
3065 *
3066 * DESCRIPTION: get minimum frame duration based on the minimum frame durations of
3067 *              the currently configured streams and the current request configuration.
3068 *
3069 * PARAMETERS : @request: request sent by the frameworks
3070 *
3071 * RETURN     : min frame duration for a particular request
3072 *
3073 *==========================================================================*/
3074int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3075{
3076 bool hasJpegStream = false;
3077 bool hasRawStream = false;
3078 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3079 const camera3_stream_t *stream = request->output_buffers[i].stream;
3080 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3081 hasJpegStream = true;
3082 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3083 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3084 stream->format == HAL_PIXEL_FORMAT_RAW16)
3085 hasRawStream = true;
3086 }
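    // The request's minimum frame duration is bounded by the slowest stream category it
    // touches: the RAW and processed minima always apply, while the JPEG minimum is added
    // only when a BLOB stream is part of the request (hasRawStream is currently computed
    // but not used in this decision).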
3087
3088 if (!hasJpegStream)
3089 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3090 else
3091 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3092}
3093
3094/*===========================================================================
3095 * FUNCTION : handleBuffersDuringFlushLock
3096 *
3097 * DESCRIPTION: Account for buffers returned from back-end during flush
3098 * This function is executed while mMutex is held by the caller.
3099 *
3100 * PARAMETERS :
3101 * @buffer: image buffer for the callback
3102 *
3103 * RETURN :
3104 *==========================================================================*/
3105void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3106{
3107 bool buffer_found = false;
3108 for (List<PendingBuffersInRequest>::iterator req =
3109 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3110 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3111 for (List<PendingBufferInfo>::iterator i =
3112 req->mPendingBufferList.begin();
3113 i != req->mPendingBufferList.end(); i++) {
3114 if (i->buffer == buffer->buffer) {
3115 mPendingBuffersMap.numPendingBufsAtFlush--;
3116 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3117 buffer->buffer, req->frame_number,
3118 mPendingBuffersMap.numPendingBufsAtFlush);
3119 buffer_found = true;
3120 break;
3121 }
3122 }
3123 if (buffer_found) {
3124 break;
3125 }
3126 }
3127 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3128 //signal the flush()
3129 LOGD("All buffers returned to HAL. Continue flush");
3130 pthread_cond_signal(&mBuffersCond);
3131 }
3132}
3133
Thierry Strudel3d639192016-09-09 11:52:26 -07003134/*===========================================================================
3135 * FUNCTION : handleBatchMetadata
3136 *
3137 * DESCRIPTION: Handles metadata buffer callback in batch mode
3138 *
3139 * PARAMETERS : @metadata_buf: metadata buffer
3140 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3141 * the meta buf in this method
3142 *
3143 * RETURN :
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::handleBatchMetadata(
3147 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3148{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003149 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003150
3151 if (NULL == metadata_buf) {
3152 LOGE("metadata_buf is NULL");
3153 return;
3154 }
3155    /* In batch mode, the metadata will contain the frame number and timestamp of
3156     * the last frame in the batch. Eg: a batch containing buffers from requests
3157     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3158     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3159     * multiple process_capture_results */
3160 metadata_buffer_t *metadata =
3161 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3162 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3163 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3164 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3165 uint32_t frame_number = 0, urgent_frame_number = 0;
3166 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3167 bool invalid_metadata = false;
3168 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3169 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003170 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003171
3172 int32_t *p_frame_number_valid =
3173 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3174 uint32_t *p_frame_number =
3175 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3176 int64_t *p_capture_time =
3177 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3178 int32_t *p_urgent_frame_number_valid =
3179 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3180 uint32_t *p_urgent_frame_number =
3181 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3182
3183 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3184 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3185 (NULL == p_urgent_frame_number)) {
3186 LOGE("Invalid metadata");
3187 invalid_metadata = true;
3188 } else {
3189 frame_number_valid = *p_frame_number_valid;
3190 last_frame_number = *p_frame_number;
3191 last_frame_capture_time = *p_capture_time;
3192 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3193 last_urgent_frame_number = *p_urgent_frame_number;
3194 }
3195
3196    /* In batch mode, when no video buffers are requested, set_parms are sent
3197 * for every capture_request. The difference between consecutive urgent
3198 * frame numbers and frame numbers should be used to interpolate the
3199 * corresponding frame numbers and time stamps */
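    /* For example (hypothetical numbers): if mPendingBatchMap maps this batch to first
     * frame 40 and the metadata reports last frame 47, frameNumDiff is 8 and the loop
     * below replays the single batch metadata as eight per-frame results (40 through 47),
     * each with an interpolated frame number and timestamp. */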
3200 pthread_mutex_lock(&mMutex);
3201 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003202 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3203 if(idx < 0) {
3204 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3205 last_urgent_frame_number);
3206 mState = ERROR;
3207 pthread_mutex_unlock(&mMutex);
3208 return;
3209 }
3210 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003211 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3212 first_urgent_frame_number;
3213
3214 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3215 urgent_frame_number_valid,
3216 first_urgent_frame_number, last_urgent_frame_number);
3217 }
3218
3219 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003220 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3221 if(idx < 0) {
3222 LOGE("Invalid frame number received: %d. Irrecoverable error",
3223 last_frame_number);
3224 mState = ERROR;
3225 pthread_mutex_unlock(&mMutex);
3226 return;
3227 }
3228 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003229 frameNumDiff = last_frame_number + 1 -
3230 first_frame_number;
3231 mPendingBatchMap.removeItem(last_frame_number);
3232
3233 LOGD("frm: valid: %d frm_num: %d - %d",
3234 frame_number_valid,
3235 first_frame_number, last_frame_number);
3236
3237 }
3238 pthread_mutex_unlock(&mMutex);
3239
3240 if (urgent_frame_number_valid || frame_number_valid) {
3241 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3242 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3243 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3244 urgentFrameNumDiff, last_urgent_frame_number);
3245 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3246 LOGE("frameNumDiff: %d frameNum: %d",
3247 frameNumDiff, last_frame_number);
3248 }
3249
3250 for (size_t i = 0; i < loopCount; i++) {
3251 /* handleMetadataWithLock is called even for invalid_metadata for
3252 * pipeline depth calculation */
3253 if (!invalid_metadata) {
3254 /* Infer frame number. Batch metadata contains frame number of the
3255 * last frame */
3256 if (urgent_frame_number_valid) {
3257 if (i < urgentFrameNumDiff) {
3258 urgent_frame_number =
3259 first_urgent_frame_number + i;
3260 LOGD("inferred urgent frame_number: %d",
3261 urgent_frame_number);
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3263 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3264 } else {
3265 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3268 }
3269 }
3270
3271 /* Infer frame number. Batch metadata contains frame number of the
3272 * last frame */
3273 if (frame_number_valid) {
3274 if (i < frameNumDiff) {
3275 frame_number = first_frame_number + i;
3276 LOGD("inferred frame_number: %d", frame_number);
3277 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3278 CAM_INTF_META_FRAME_NUMBER, frame_number);
3279 } else {
3280 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3283 }
3284 }
3285
3286 if (last_frame_capture_time) {
3287 //Infer timestamp
3288 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003289 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003291 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003292 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3293 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3294 LOGD("batch capture_time: %lld, capture_time: %lld",
3295 last_frame_capture_time, capture_time);
3296 }
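            /* Timestamp interpolation above: first_frame_capture_time is
             * last_frame_capture_time - ((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps,
             * and the i-th frame in the batch gets
             * first_frame_capture_time + (i * NSEC_PER_SEC) / mHFRVideoFps, so consecutive
             * results are spaced one HFR frame interval apart. */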
3297 }
3298 pthread_mutex_lock(&mMutex);
3299 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003300 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003301 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3302 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003303                &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 pthread_mutex_unlock(&mMutex);
3305 }
3306
3307 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003308 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003309 mMetadataChannel->bufDone(metadata_buf);
3310 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003311 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003312 }
3313}
3314
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003315void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3316 camera3_error_msg_code_t errorCode)
3317{
3318 camera3_notify_msg_t notify_msg;
3319 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3320 notify_msg.type = CAMERA3_MSG_ERROR;
3321 notify_msg.message.error.error_code = errorCode;
3322 notify_msg.message.error.error_stream = NULL;
3323 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003324 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003325
3326 return;
3327}
Thierry Strudel3d639192016-09-09 11:52:26 -07003328/*===========================================================================
3329 * FUNCTION : handleMetadataWithLock
3330 *
3331 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3332 *
3333 * PARAMETERS : @metadata_buf: metadata buffer
3334 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3335 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003336 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3337 * last urgent metadata in a batch. Always true for non-batch mode
3338 * @lastMetadataInBatch: Boolean to indicate whether this is the
3339 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3341 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003342 *
3343 * RETURN :
3344 *
3345 *==========================================================================*/
3346void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003347 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003348 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3349 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3353 //during flush do not send metadata from this thread
3354 LOGD("not sending metadata during flush or when mState is error");
3355 if (free_and_bufdone_meta_buf) {
3356 mMetadataChannel->bufDone(metadata_buf);
3357 free(metadata_buf);
3358 }
3359 return;
3360 }
3361
3362 //not in flush
3363 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3364 int32_t frame_number_valid, urgent_frame_number_valid;
3365 uint32_t frame_number, urgent_frame_number;
3366 int64_t capture_time;
3367 nsecs_t currentSysTime;
3368
3369 int32_t *p_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3372 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3373 int32_t *p_urgent_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_urgent_frame_number =
3376 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3377 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3378 metadata) {
3379        if ((NULL != p_frame_number_valid) && (NULL != p_frame_number))
3380            LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                    *p_frame_number_valid, *p_frame_number);
3381 }
3382
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003383 camera_metadata_t *resultMetadata = nullptr;
3384
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3386 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3387 LOGE("Invalid metadata");
3388 if (free_and_bufdone_meta_buf) {
3389 mMetadataChannel->bufDone(metadata_buf);
3390 free(metadata_buf);
3391 }
3392 goto done_metadata;
3393 }
3394 frame_number_valid = *p_frame_number_valid;
3395 frame_number = *p_frame_number;
3396 capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 urgent_frame_number = *p_urgent_frame_number;
3399 currentSysTime = systemTime(CLOCK_MONOTONIC);
3400
3401 // Detect if buffers from any requests are overdue
3402 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003403 int64_t timeout;
3404 {
3405 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3406 // If there is a pending HDR+ request, the following requests may be blocked until the
3407 // HDR+ request is done. So allow a longer timeout.
3408 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3409 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3410 }
3411
3412 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003414 assert(missed.stream->priv);
3415 if (missed.stream->priv) {
3416 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3417 assert(ch->mStreams[0]);
3418 if (ch->mStreams[0]) {
3419 LOGE("Cancel missing frame = %d, buffer = %p,"
3420 "stream type = %d, stream format = %d",
3421 req.frame_number, missed.buffer,
3422 ch->mStreams[0]->getMyType(), missed.stream->format);
3423 ch->timeoutFrame(req.frame_number);
3424 }
3425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 }
3427 }
3428 }
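    // Any request whose buffers have been outstanding longer than the timeout above
    // (which is extended while an HDR+ request is pending) has timeoutFrame() called on
    // its channels, so the back end can return or cancel those frames instead of leaving
    // the framework waiting indefinitely.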
3429 //Partial result on process_capture_result for timestamp
3430 if (urgent_frame_number_valid) {
3431 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3432 urgent_frame_number, capture_time);
3433
3434        //Received an urgent Frame Number, handle it
3435 //using partial results
3436 for (pendingRequestIterator i =
3437 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3438 LOGD("Iterator Frame = %d urgent frame = %d",
3439 i->frame_number, urgent_frame_number);
3440
3441 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3442 (i->partial_result_cnt == 0)) {
3443 LOGE("Error: HAL missed urgent metadata for frame number %d",
3444 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003445 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003446 }
3447
3448 if (i->frame_number == urgent_frame_number &&
3449 i->bUrgentReceived == 0) {
3450
3451 camera3_capture_result_t result;
3452 memset(&result, 0, sizeof(camera3_capture_result_t));
3453
3454 i->partial_result_cnt++;
3455 i->bUrgentReceived = 1;
3456 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003457 result.result = translateCbUrgentMetadataToResultMetadata(
3458 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 // Populate metadata result
3460 result.frame_number = urgent_frame_number;
3461 result.num_output_buffers = 0;
3462 result.output_buffers = NULL;
3463 result.partial_result = i->partial_result_cnt;
3464
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003465 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003466 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003467 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003468 result.partial_result == PARTIAL_RESULT_COUNT);
3469 }
3470
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003471 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 LOGD("urgent frame_number = %u, capture_time = %lld",
3473 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003474 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3475 // Instant AEC settled for this frame.
3476 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3477 mInstantAECSettledFrameNumber = urgent_frame_number;
3478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 free_camera_metadata((camera_metadata_t *)result.result);
3480 break;
3481 }
3482 }
3483 }
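    // At this point the urgent (3A) metadata, if any, has already been sent to the
    // framework as an early partial result for its frame; the rest of that frame's
    // metadata is delivered later as the final partial result.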
3484
3485 if (!frame_number_valid) {
3486 LOGD("Not a valid normal frame number, used as SOF only");
3487 if (free_and_bufdone_meta_buf) {
3488 mMetadataChannel->bufDone(metadata_buf);
3489 free(metadata_buf);
3490 }
3491 goto done_metadata;
3492 }
3493 LOGH("valid frame_number = %u, capture_time = %lld",
3494 frame_number, capture_time);
3495
Emilian Peev7650c122017-01-19 08:24:33 -08003496 if (metadata->is_depth_data_valid) {
3497 handleDepthDataLocked(metadata->depth_data, frame_number);
3498 }
3499
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003500    // Check whether any stream buffer corresponding to this frame was dropped.
3501    // If dropped, send ERROR_BUFFER for the corresponding stream.
3502    // OR, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3503 for (auto & pendingRequest : mPendingRequestsList) {
3504 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3505 mInstantAECSettledFrameNumber)) {
3506 camera3_notify_msg_t notify_msg = {};
3507 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003508 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003509 QCamera3ProcessingChannel *channel =
3510 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003511 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003512 if (p_cam_frame_drop) {
3513 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003514 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003515 // Got the stream ID for drop frame.
3516 dropFrame = true;
3517 break;
3518 }
3519 }
3520 } else {
3521 // This is instant AEC case.
3522 // For instant AEC drop the stream untill AEC is settled.
3523                    // For instant AEC, drop frames for this stream until AEC has settled.
3524 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003526 if (dropFrame) {
3527 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3528 if (p_cam_frame_drop) {
3529 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003531 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003532 } else {
3533 // For instant AEC, inform frame drop and frame number
3534 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3535 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003536 pendingRequest.frame_number, streamID,
3537 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003538 }
3539 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003540 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003541 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003543 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003544 if (p_cam_frame_drop) {
3545 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003546 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 } else {
3549 // For instant AEC, inform frame drop and frame number
3550 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3551 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003552 pendingRequest.frame_number, streamID,
3553 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 }
3555 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 PendingFrameDrop.stream_ID = streamID;
3558 // Add the Frame drop info to mPendingFrameDropList
3559 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 }
3562 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 }
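    // For every buffer flagged above, an ERROR_BUFFER notification has been sent and an
    // entry added to mPendingFrameDropList; when the buffer itself is returned it is
    // matched against this list in handleBufferWithLock() and reported with
    // CAMERA3_BUFFER_STATUS_ERROR.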
Thierry Strudel3d639192016-09-09 11:52:26 -07003564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 for (auto & pendingRequest : mPendingRequestsList) {
3566 // Find the pending request with the frame number.
3567 if (pendingRequest.frame_number == frame_number) {
3568 // Update the sensor timestamp.
3569 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003570
Thierry Strudel3d639192016-09-09 11:52:26 -07003571
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003572 /* Set the timestamp in display metadata so that clients aware of
3573               private_handle, such as VT, can use these unmodified timestamps.
3574 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003575 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003576
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 // Find channel requiring metadata, meaning internal offline postprocess
3578 // is needed.
3579 //TODO: for now, we don't support two streams requiring metadata at the same time.
3580        // (because we are not making copies, and the metadata buffer is not reference counted.)
3581 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3583 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 if (iter->need_metadata) {
3585 internalPproc = true;
3586 QCamera3ProcessingChannel *channel =
3587 (QCamera3ProcessingChannel *)iter->stream->priv;
3588 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003589 if(p_is_metabuf_queued != NULL) {
3590 *p_is_metabuf_queued = true;
3591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 break;
3593 }
3594 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003595 for (auto itr = pendingRequest.internalRequestList.begin();
3596 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003597 if (itr->need_metadata) {
3598 internalPproc = true;
3599 QCamera3ProcessingChannel *channel =
3600 (QCamera3ProcessingChannel *)itr->stream->priv;
3601 channel->queueReprocMetadata(metadata_buf);
3602 break;
3603 }
3604 }
3605
Thierry Strudel54dc9782017-02-15 12:12:10 -08003606 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 resultMetadata = translateFromHalMetadata(metadata,
3608 pendingRequest.timestamp, pendingRequest.request_id,
3609 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3610 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003611 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003612 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003613 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003614 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003615 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003616 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003618 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003620 if (pendingRequest.blob_request) {
3621 //Dump tuning metadata if enabled and available
3622 char prop[PROPERTY_VALUE_MAX];
3623 memset(prop, 0, sizeof(prop));
3624 property_get("persist.camera.dumpmetadata", prop, "0");
3625 int32_t enabled = atoi(prop);
3626 if (enabled && metadata->is_tuning_params_valid) {
3627 dumpMetadataToFile(metadata->tuning_params,
3628 mMetaFrameCount,
3629 enabled,
3630 "Snapshot",
3631 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 }
3633 }
3634
3635 if (!internalPproc) {
3636 LOGD("couldn't find need_metadata for this metadata");
3637 // Return metadata buffer
3638 if (free_and_bufdone_meta_buf) {
3639 mMetadataChannel->bufDone(metadata_buf);
3640 free(metadata_buf);
3641 }
3642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 }
3646 }
3647
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 // Try to send out shutter callbacks and capture results.
3649 handlePendingResultsWithLock(frame_number, resultMetadata);
3650 return;
3651
Thierry Strudel3d639192016-09-09 11:52:26 -07003652done_metadata:
3653 for (pendingRequestIterator i = mPendingRequestsList.begin();
3654 i != mPendingRequestsList.end() ;i++) {
3655 i->pipeline_depth++;
3656 }
3657 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3658 unblockRequestIfNecessary();
3659}
3660
3661/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003662 * FUNCTION   : handleDepthDataLocked
3663 *
3664 * DESCRIPTION: Handles incoming depth data
3665 *
3666 * PARAMETERS : @depthData : Depth data
3667 * @frameNumber: Frame number of the incoming depth data
3668 *
3669 * RETURN :
3670 *
3671 *==========================================================================*/
3672void QCamera3HardwareInterface::handleDepthDataLocked(
3673 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3674 uint32_t currentFrameNumber;
3675 buffer_handle_t *depthBuffer;
3676
3677 if (nullptr == mDepthChannel) {
3678 LOGE("Depth channel not present!");
3679 return;
3680 }
3681
3682 camera3_stream_buffer_t resultBuffer =
3683 {.acquire_fence = -1,
3684 .release_fence = -1,
3685 .status = CAMERA3_BUFFER_STATUS_OK,
3686 .buffer = nullptr,
3687 .stream = mDepthChannel->getStream()};
3688 camera3_capture_result_t result =
3689 {.result = nullptr,
3690 .num_output_buffers = 1,
3691 .output_buffers = &resultBuffer,
3692 .partial_result = 0,
3693 .frame_number = 0};
3694
3695 do {
3696 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3697 if (nullptr == depthBuffer) {
3698 break;
3699 }
3700
3701 result.frame_number = currentFrameNumber;
3702 resultBuffer.buffer = depthBuffer;
3703 if (currentFrameNumber == frameNumber) {
3704 int32_t rc = mDepthChannel->populateDepthData(depthData,
3705 frameNumber);
3706 if (NO_ERROR != rc) {
3707 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3708 } else {
3709 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3710 }
3711 } else if (currentFrameNumber > frameNumber) {
3712 break;
3713 } else {
3714 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3715 {{currentFrameNumber, mDepthChannel->getStream(),
3716 CAMERA3_MSG_ERROR_BUFFER}}};
3717 orchestrateNotify(&notify_msg);
3718
3719            LOGE("Depth buffer for frame number: %d is missing, "
3720                    "returning it with an error!", currentFrameNumber);
3721 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3722 }
3723 mDepthChannel->unmapBuffer(currentFrameNumber);
3724
3725 orchestrateResult(&result);
3726 } while (currentFrameNumber < frameNumber);
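    // The loop above drains the depth channel in frame order: buffers older than the
    // incoming depth data are returned with CAMERA3_BUFFER_STATUS_ERROR (plus an error
    // notify), the buffer matching frameNumber is populated and returned, and any newer
    // buffers are not returned until their depth data arrives.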
3727}
3728
3729/*===========================================================================
3730 * FUNCTION : notifyErrorFoPendingDepthData
3731 *
3732 * DESCRIPTION: Returns error for any pending depth buffers
3733 *
3734 * PARAMETERS : depthCh - depth channel that needs to get flushed
3735 *
3736 * RETURN :
3737 *
3738 *==========================================================================*/
3739void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3740 QCamera3DepthChannel *depthCh) {
3741 uint32_t currentFrameNumber;
3742 buffer_handle_t *depthBuffer;
3743
3744 if (nullptr == depthCh) {
3745 return;
3746 }
3747
3748 camera3_notify_msg_t notify_msg =
3749 {.type = CAMERA3_MSG_ERROR,
3750 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3751 camera3_stream_buffer_t resultBuffer =
3752 {.acquire_fence = -1,
3753 .release_fence = -1,
3754 .buffer = nullptr,
3755 .stream = depthCh->getStream(),
3756 .status = CAMERA3_BUFFER_STATUS_ERROR};
3757 camera3_capture_result_t result =
3758 {.result = nullptr,
3759 .frame_number = 0,
3760 .num_output_buffers = 1,
3761 .partial_result = 0,
3762 .output_buffers = &resultBuffer};
3763
3764 while (nullptr !=
3765 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3766 depthCh->unmapBuffer(currentFrameNumber);
3767
3768 notify_msg.message.error.frame_number = currentFrameNumber;
3769 orchestrateNotify(&notify_msg);
3770
3771 resultBuffer.buffer = depthBuffer;
3772 result.frame_number = currentFrameNumber;
3773 orchestrateResult(&result);
3774 };
3775}
3776
3777/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003778 * FUNCTION : hdrPlusPerfLock
3779 *
3780 * DESCRIPTION: perf lock for HDR+ using custom intent
3781 *
3782 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3783 *
3784 * RETURN : None
3785 *
3786 *==========================================================================*/
3787void QCamera3HardwareInterface::hdrPlusPerfLock(
3788 mm_camera_super_buf_t *metadata_buf)
3789{
3790 if (NULL == metadata_buf) {
3791 LOGE("metadata_buf is NULL");
3792 return;
3793 }
3794 metadata_buffer_t *metadata =
3795 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3796 int32_t *p_frame_number_valid =
3797 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3798 uint32_t *p_frame_number =
3799 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3800
3801 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3802 LOGE("%s: Invalid metadata", __func__);
3803 return;
3804 }
3805
3806 //acquire perf lock for 5 sec after the last HDR frame is captured
3807 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3808 if ((p_frame_number != NULL) &&
3809 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003810 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003811 }
3812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813}
3814
3815/*===========================================================================
3816 * FUNCTION : handleInputBufferWithLock
3817 *
3818 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3819 *
3820 * PARAMETERS : @frame_number: frame number of the input buffer
3821 *
3822 * RETURN :
3823 *
3824 *==========================================================================*/
3825void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3826{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003827 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003828 pendingRequestIterator i = mPendingRequestsList.begin();
3829 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3830 i++;
3831 }
3832 if (i != mPendingRequestsList.end() && i->input_buffer) {
3833 //found the right request
3834 if (!i->shutter_notified) {
3835 CameraMetadata settings;
3836 camera3_notify_msg_t notify_msg;
3837 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3838 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3839 if(i->settings) {
3840 settings = i->settings;
3841 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3842 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3843 } else {
3844 LOGE("No timestamp in input settings! Using current one.");
3845 }
3846 } else {
3847 LOGE("Input settings missing!");
3848 }
3849
3850 notify_msg.type = CAMERA3_MSG_SHUTTER;
3851 notify_msg.message.shutter.frame_number = frame_number;
3852 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003853 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003854 i->shutter_notified = true;
3855 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3856 i->frame_number, notify_msg.message.shutter.timestamp);
3857 }
3858
3859 if (i->input_buffer->release_fence != -1) {
3860 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3861 close(i->input_buffer->release_fence);
3862 if (rc != OK) {
3863 LOGE("input buffer sync wait failed %d", rc);
3864 }
3865 }
3866
3867 camera3_capture_result result;
3868 memset(&result, 0, sizeof(camera3_capture_result));
3869 result.frame_number = frame_number;
3870 result.result = i->settings;
3871 result.input_buffer = i->input_buffer;
3872 result.partial_result = PARTIAL_RESULT_COUNT;
3873
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003874 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875 LOGD("Input request metadata and input buffer frame_number = %u",
3876 i->frame_number);
3877 i = erasePendingRequest(i);
3878 } else {
3879 LOGE("Could not find input request for frame number %d", frame_number);
3880 }
3881}
3882
3883/*===========================================================================
3884 * FUNCTION : handleBufferWithLock
3885 *
3886 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3887 *
3888 * PARAMETERS : @buffer: image buffer for the callback
3889 * @frame_number: frame number of the image buffer
3890 *
3891 * RETURN :
3892 *
3893 *==========================================================================*/
3894void QCamera3HardwareInterface::handleBufferWithLock(
3895 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3896{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003897 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003898
3899 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3900 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3901 }
3902
Thierry Strudel3d639192016-09-09 11:52:26 -07003903 /* Nothing to be done during error state */
3904 if ((ERROR == mState) || (DEINIT == mState)) {
3905 return;
3906 }
3907 if (mFlushPerf) {
3908 handleBuffersDuringFlushLock(buffer);
3909 return;
3910 }
3911 //not in flush
3912 // If the frame number doesn't exist in the pending request list,
3913 // directly send the buffer to the frameworks, and update pending buffers map
3914 // Otherwise, book-keep the buffer.
3915 pendingRequestIterator i = mPendingRequestsList.begin();
3916 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3917 i++;
3918 }
3919 if (i == mPendingRequestsList.end()) {
3920        // Verify that all pending requests' frame_numbers are greater than this one
3921 for (pendingRequestIterator j = mPendingRequestsList.begin();
3922 j != mPendingRequestsList.end(); j++) {
3923 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3924 LOGW("Error: pending live frame number %d is smaller than %d",
3925 j->frame_number, frame_number);
3926 }
3927 }
3928 camera3_capture_result_t result;
3929 memset(&result, 0, sizeof(camera3_capture_result_t));
3930 result.result = NULL;
3931 result.frame_number = frame_number;
3932 result.num_output_buffers = 1;
3933 result.partial_result = 0;
3934 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3935 m != mPendingFrameDropList.end(); m++) {
3936 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3937 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3938 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3939 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3940 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3941 frame_number, streamID);
3942 m = mPendingFrameDropList.erase(m);
3943 break;
3944 }
3945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003946 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003947 result.output_buffers = buffer;
3948 LOGH("result frame_number = %d, buffer = %p",
3949 frame_number, buffer->buffer);
3950
3951 mPendingBuffersMap.removeBuf(buffer->buffer);
3952
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003953 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003954 } else {
3955 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003956 if (i->input_buffer->release_fence != -1) {
3957 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3958 close(i->input_buffer->release_fence);
3959 if (rc != OK) {
3960 LOGE("input buffer sync wait failed %d", rc);
3961 }
3962 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003963 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003964
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003965 // Put buffer into the pending request
3966 for (auto &requestedBuffer : i->buffers) {
3967 if (requestedBuffer.stream == buffer->stream) {
3968 if (requestedBuffer.buffer != nullptr) {
3969 LOGE("Error: buffer is already set");
3970 } else {
3971 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3972 sizeof(camera3_stream_buffer_t));
3973 *(requestedBuffer.buffer) = *buffer;
3974 LOGH("cache buffer %p at result frame_number %u",
3975 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003976 }
3977 }
3978 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003979
3980 if (i->input_buffer) {
3981 // For a reprocessing request, try to send out shutter callback and result metadata.
3982 handlePendingResultsWithLock(frame_number, nullptr);
3983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003985
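    // On the first returned preview buffer, release the start-preview and
    // open-camera perf locks and switch to the encode power hint.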
3986 if (mPreviewStarted == false) {
3987 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3988 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3989 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3990 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3991 mPreviewStarted = true;
3992
3993 // Set power hint for preview
3994 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3995 }
3996 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003997}
3998
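/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Update the pending request matching frameNumber with the given
 *              result metadata, then send out, in frame number order, the
 *              shutter callbacks and capture results of all pending requests
 *              that are ready, notifying ERROR_RESULT for earlier live
 *              requests that still have no result metadata. Note that mMutex
 *              is held when this function is called.
 *
 * PARAMETERS :
 *   @frameNumber    : frame number of the request this result belongs to
 *   @resultMetadata : result metadata for the request, or nullptr (e.g. for a
 *                     reprocess request, whose settings are reused as result)
 *
 * RETURN     :
 *
 *==========================================================================*/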
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003999void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4000 const camera_metadata_t *resultMetadata)
4001{
4002 // Find the pending request for this result metadata.
4003 auto requestIter = mPendingRequestsList.begin();
4004 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4005 requestIter++;
4006 }
4007
4008 if (requestIter == mPendingRequestsList.end()) {
4009 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4010 return;
4011 }
4012
4013 // Update the result metadata
4014 requestIter->resultMetadata = resultMetadata;
4015
4016 // Check what type of request this is.
4017 bool liveRequest = false;
4018 if (requestIter->hdrplus) {
4019 // HDR+ request doesn't have partial results.
4020 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4021 } else if (requestIter->input_buffer != nullptr) {
4022 // Reprocessing request result is the same as settings.
4023 requestIter->resultMetadata = requestIter->settings;
4024 // Reprocessing request doesn't have partial results.
4025 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4026 } else {
4027 liveRequest = true;
4028 requestIter->partial_result_cnt++;
4029 mPendingLiveRequest--;
4030
4031 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004032 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4033 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004034 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4035 }
4036 }
4037
4038 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4039 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4040 bool readyToSend = true;
4041
4042 // Iterate through the pending requests to send out shutter callbacks and results that are
4043 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4044 // live requests that don't have result metadata yet.
4045 auto iter = mPendingRequestsList.begin();
4046 while (iter != mPendingRequestsList.end()) {
4047 // Check if current pending request is ready. If it's not ready, the following pending
4048 // requests are also not ready.
4049 if (readyToSend && iter->resultMetadata == nullptr) {
4050 readyToSend = false;
4051 }
4052
4053 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4054
4055 std::vector<camera3_stream_buffer_t> outputBuffers;
4056
4057 camera3_capture_result_t result = {};
4058 result.frame_number = iter->frame_number;
4059 result.result = iter->resultMetadata;
4060 result.partial_result = iter->partial_result_cnt;
4061
4062 // If this pending buffer has result metadata, we may be able to send out shutter callback
4063 // and result metadata.
4064 if (iter->resultMetadata != nullptr) {
4065 if (!readyToSend) {
4066 // If any of the previous pending request is not ready, this pending request is
4067 // also not ready to send in order to keep shutter callbacks and result metadata
4068 // in order.
4069 iter++;
4070 continue;
4071 }
4072
4073 // Invoke shutter callback if not yet.
4074 if (!iter->shutter_notified) {
4075 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4076
4077 // Find the timestamp in HDR+ result metadata
4078 camera_metadata_ro_entry_t entry;
4079 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4080 ANDROID_SENSOR_TIMESTAMP, &entry);
4081 if (res != OK) {
4082 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4083 __FUNCTION__, iter->frame_number, strerror(-res), res);
4084 } else {
4085 timestamp = entry.data.i64[0];
4086 }
4087
4088 camera3_notify_msg_t notify_msg = {};
4089 notify_msg.type = CAMERA3_MSG_SHUTTER;
4090 notify_msg.message.shutter.frame_number = iter->frame_number;
4091 notify_msg.message.shutter.timestamp = timestamp;
4092 orchestrateNotify(&notify_msg);
4093 iter->shutter_notified = true;
4094 }
4095
4096 result.input_buffer = iter->input_buffer;
4097
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004098 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4099 // If the result metadata belongs to a live request, notify errors for previous pending
4100 // live requests.
4101 mPendingLiveRequest--;
4102
4103 CameraMetadata dummyMetadata;
4104 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4105 result.result = dummyMetadata.release();
4106
4107 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004108
4109 // partial_result should be PARTIAL_RESULT_CNT in case of
4110 // ERROR_RESULT.
4111 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4112 result.partial_result = PARTIAL_RESULT_COUNT;
4113
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004114 } else {
4115 iter++;
4116 continue;
4117 }
4118
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004119 // Prepare output buffer array
4120 for (auto bufferInfoIter = iter->buffers.begin();
4121 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4122 if (bufferInfoIter->buffer != nullptr) {
4123
4124 QCamera3Channel *channel =
4125 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4126 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4127
4128 // Check if this buffer is a dropped frame.
4129 auto frameDropIter = mPendingFrameDropList.begin();
4130 while (frameDropIter != mPendingFrameDropList.end()) {
4131 if((frameDropIter->stream_ID == streamID) &&
4132 (frameDropIter->frame_number == frameNumber)) {
4133 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4134 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4135 streamID);
4136 mPendingFrameDropList.erase(frameDropIter);
4137 break;
4138 } else {
4139 frameDropIter++;
4140 }
4141 }
4142
4143 // Check buffer error status
4144 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4145 bufferInfoIter->buffer->buffer);
4146 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4147
4148 outputBuffers.push_back(*(bufferInfoIter->buffer));
4149 free(bufferInfoIter->buffer);
4150 bufferInfoIter->buffer = NULL;
4151 }
4152 }
4153
4154 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4155 result.num_output_buffers = outputBuffers.size();
4156
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 orchestrateResult(&result);
4158
4159 // For reprocessing, result metadata is the same as settings so do not free it here to
4160 // avoid double free.
4161 if (result.result != iter->settings) {
4162 free_camera_metadata((camera_metadata_t *)result.result);
4163 }
4164 iter->resultMetadata = nullptr;
4165 iter = erasePendingRequest(iter);
4166 }
4167
4168 if (liveRequest) {
4169 for (auto &iter : mPendingRequestsList) {
4170 // Increment pipeline depth for the following pending requests.
4171 if (iter.frame_number > frameNumber) {
4172 iter.pipeline_depth++;
4173 }
4174 }
4175 }
4176
4177 unblockRequestIfNecessary();
4178}
4179
Thierry Strudel3d639192016-09-09 11:52:26 -07004180/*===========================================================================
4181 * FUNCTION : unblockRequestIfNecessary
4182 *
4183 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4184 * that mMutex is held when this function is called.
4185 *
4186 * PARAMETERS :
4187 *
4188 * RETURN :
4189 *
4190 *==========================================================================*/
4191void QCamera3HardwareInterface::unblockRequestIfNecessary()
4192{
4193 // Unblock process_capture_request
4194 pthread_cond_signal(&mRequestCond);
4195}
4196
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004197/*===========================================================================
4198 * FUNCTION : isHdrSnapshotRequest
4199 *
4200 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4201 *
4202 * PARAMETERS : camera3 request structure
4203 *
4204 * RETURN : boolean decision variable
4205 *
4206 *==========================================================================*/
4207bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4208{
4209 if (request == NULL) {
4210 LOGE("Invalid request handle");
4211 assert(0);
4212 return false;
4213 }
4214
4215 if (!mForceHdrSnapshot) {
4216 CameraMetadata frame_settings;
4217 frame_settings = request->settings;
4218
4219 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4220 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4221 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4222 return false;
4223 }
4224 } else {
4225 return false;
4226 }
4227
4228 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4229 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4230 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4231 return false;
4232 }
4233 } else {
4234 return false;
4235 }
4236 }
4237
4238 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4239 if (request->output_buffers[i].stream->format
4240 == HAL_PIXEL_FORMAT_BLOB) {
4241 return true;
4242 }
4243 }
4244
4245 return false;
4246}
4247/*===========================================================================
4248 * FUNCTION : orchestrateRequest
4249 *
4250 * DESCRIPTION: Orchestrates a capture request from camera service
4251 *
4252 * PARAMETERS :
4253 * @request : request from framework to process
4254 *
4255 * RETURN : Error status codes
4256 *
4257 *==========================================================================*/
4258int32_t QCamera3HardwareInterface::orchestrateRequest(
4259 camera3_capture_request_t *request)
4260{
4261
4262 uint32_t originalFrameNumber = request->frame_number;
4263 uint32_t originalOutputCount = request->num_output_buffers;
4264 const camera_metadata_t *original_settings = request->settings;
4265 List<InternalRequest> internallyRequestedStreams;
4266 List<InternalRequest> emptyInternalList;
4267
4268 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
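        /* HDR snapshot: expand the single framework request into a bracketed
         * sequence of internal requests - AE-locked metering/settling frames
         * followed by actual captures at different exposure compensations on
         * the BLOB stream. Internal frame numbers are mapped back to the
         * framework frame number through _orchestrationDb. */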
4269 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4270 uint32_t internalFrameNumber;
4271 CameraMetadata modified_meta;
4272
4273
4274 /* Add Blob channel to list of internally requested streams */
4275 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4276 if (request->output_buffers[i].stream->format
4277 == HAL_PIXEL_FORMAT_BLOB) {
4278 InternalRequest streamRequested;
4279 streamRequested.meteringOnly = 1;
4280 streamRequested.need_metadata = 0;
4281 streamRequested.stream = request->output_buffers[i].stream;
4282 internallyRequestedStreams.push_back(streamRequested);
4283 }
4284 }
4285 request->num_output_buffers = 0;
4286 auto itr = internallyRequestedStreams.begin();
4287
4288 /* Modify setting to set compensation */
4289 modified_meta = request->settings;
4290 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4291 uint8_t aeLock = 1;
4292 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4293 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4294 camera_metadata_t *modified_settings = modified_meta.release();
4295 request->settings = modified_settings;
4296
4297 /* Capture Settling & -2x frame */
4298 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4299 request->frame_number = internalFrameNumber;
4300 processCaptureRequest(request, internallyRequestedStreams);
4301
4302 request->num_output_buffers = originalOutputCount;
4303 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4304 request->frame_number = internalFrameNumber;
4305 processCaptureRequest(request, emptyInternalList);
4306 request->num_output_buffers = 0;
4307
4308 modified_meta = modified_settings;
4309 expCompensation = 0;
4310 aeLock = 1;
4311 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4312 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4313 modified_settings = modified_meta.release();
4314 request->settings = modified_settings;
4315
4316 /* Capture Settling & 0X frame */
4317
4318 itr = internallyRequestedStreams.begin();
4319 if (itr == internallyRequestedStreams.end()) {
4320 LOGE("Error Internally Requested Stream list is empty");
4321 assert(0);
4322 } else {
4323 itr->need_metadata = 0;
4324 itr->meteringOnly = 1;
4325 }
4326
4327 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4328 request->frame_number = internalFrameNumber;
4329 processCaptureRequest(request, internallyRequestedStreams);
4330
4331 itr = internallyRequestedStreams.begin();
4332 if (itr == internallyRequestedStreams.end()) {
4333 ALOGE("Error Internally Requested Stream list is empty");
4334 assert(0);
4335 } else {
4336 itr->need_metadata = 1;
4337 itr->meteringOnly = 0;
4338 }
4339
4340 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4341 request->frame_number = internalFrameNumber;
4342 processCaptureRequest(request, internallyRequestedStreams);
4343
4344 /* Capture 2X frame*/
4345 modified_meta = modified_settings;
4346 expCompensation = GB_HDR_2X_STEP_EV;
4347 aeLock = 1;
4348 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4349 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4350 modified_settings = modified_meta.release();
4351 request->settings = modified_settings;
4352
4353 itr = internallyRequestedStreams.begin();
4354 if (itr == internallyRequestedStreams.end()) {
4355 ALOGE("Error Internally Requested Stream list is empty");
4356 assert(0);
4357 } else {
4358 itr->need_metadata = 0;
4359 itr->meteringOnly = 1;
4360 }
4361 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4362 request->frame_number = internalFrameNumber;
4363 processCaptureRequest(request, internallyRequestedStreams);
4364
4365 itr = internallyRequestedStreams.begin();
4366 if (itr == internallyRequestedStreams.end()) {
4367 ALOGE("Error Internally Requested Stream list is empty");
4368 assert(0);
4369 } else {
4370 itr->need_metadata = 1;
4371 itr->meteringOnly = 0;
4372 }
4373
4374 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4375 request->frame_number = internalFrameNumber;
4376 processCaptureRequest(request, internallyRequestedStreams);
4377
4378
4379 /* Capture 2X on original streaming config*/
4380 internallyRequestedStreams.clear();
4381
4382 /* Restore original settings pointer */
4383 request->settings = original_settings;
4384 } else {
4385 uint32_t internalFrameNumber;
4386 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 return processCaptureRequest(request, internallyRequestedStreams);
4389 }
4390
4391 return NO_ERROR;
4392}
4393
4394/*===========================================================================
4395 * FUNCTION : orchestrateResult
4396 *
4397 * DESCRIPTION: Orchestrates a capture result to camera service
4398 *
4399 * PARAMETERS :
4400 *   @result  : capture result to be sent to camera service
4401 *
4402 * RETURN :
4403 *
4404 *==========================================================================*/
4405void QCamera3HardwareInterface::orchestrateResult(
4406 camera3_capture_result_t *result)
4407{
4408 uint32_t frameworkFrameNumber;
4409 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4410 frameworkFrameNumber);
4411 if (rc != NO_ERROR) {
4412 LOGE("Cannot find translated frameworkFrameNumber");
4413 assert(0);
4414 } else {
4415 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004416 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004417 } else {
4418 result->frame_number = frameworkFrameNumber;
4419 mCallbackOps->process_capture_result(mCallbackOps, result);
4420 }
4421 }
4422}
4423
4424/*===========================================================================
4425 * FUNCTION : orchestrateNotify
4426 *
4427 * DESCRIPTION: Orchestrates a notify to camera service
4428 *
4429 * PARAMETERS :
4430 *   @notify_msg : notify message to be sent to camera service
4431 *
4432 * RETURN :
4433 *
4434 *==========================================================================*/
4435void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4436{
4437 uint32_t frameworkFrameNumber;
4438 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004439 int32_t rc = NO_ERROR;
4440
4441 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004442 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004443
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004445 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4446 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4447 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004448 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004449 LOGE("Cannot find translated frameworkFrameNumber");
4450 assert(0);
4451 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004452 }
4453 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004454
4455 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4456 LOGD("Internal Request drop the notifyCb");
4457 } else {
4458 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4459 mCallbackOps->notify(mCallbackOps, notify_msg);
4460 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004461}
4462
4463/*===========================================================================
4464 * FUNCTION : FrameNumberRegistry
4465 *
4466 * DESCRIPTION: Constructor
4467 *
4468 * PARAMETERS :
4469 *
4470 * RETURN :
4471 *
4472 *==========================================================================*/
4473FrameNumberRegistry::FrameNumberRegistry()
4474{
4475 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4476}
4477
4478/*===========================================================================
4479 * FUNCTION : ~FrameNumberRegistry
4480 *
4481 * DESCRIPTION: Destructor
4482 *
4483 * PARAMETERS :
4484 *
4485 * RETURN :
4486 *
4487 *==========================================================================*/
4488FrameNumberRegistry::~FrameNumberRegistry()
4489{
4490}
4491
4492/*===========================================================================
4493 * FUNCTION : purgeOldEntriesLocked
4494 *
4495 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4496 *
4497 * PARAMETERS :
4498 *
4499 * RETURN : NONE
4500 *
4501 *==========================================================================*/
4502void FrameNumberRegistry::purgeOldEntriesLocked()
4503{
4504 while (_register.begin() != _register.end()) {
4505 auto itr = _register.begin();
4506 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4507 _register.erase(itr);
4508 } else {
4509 return;
4510 }
4511 }
4512}
4513
4514/*===========================================================================
4515 * FUNCTION : allocStoreInternalFrameNumber
4516 *
4517 * DESCRIPTION: Method to note down a framework request and associate a new
4518 * internal request number against it
4519 *
4520 * PARAMETERS :
4521 * @frameworkFrameNumber: Identifier given by framework
4522 * @internalFrameNumber : Output parameter which will hold the newly
4523 * generated internal frame number
4524 *
4525 * RETURN : Error code
4526 *
4527 *==========================================================================*/
4528int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4529 uint32_t &internalFrameNumber)
4530{
4531 Mutex::Autolock lock(mRegistryLock);
4532 internalFrameNumber = _nextFreeInternalNumber++;
4533 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4534 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4535 purgeOldEntriesLocked();
4536 return NO_ERROR;
4537}
4538
4539/*===========================================================================
4540 * FUNCTION : generateStoreInternalFrameNumber
4541 *
4542 * DESCRIPTION: Method to generate a new internal request number that is not
4543 * associated with any framework request
4544 *
4545 * PARAMETERS :
4546 * @internalFrameNumber: Output parameter which will hold the newly
4547 * generated internal frame number
4548 *
4549 * RETURN : Error code
4550 *
4551 *==========================================================================*/
4552int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4553{
4554 Mutex::Autolock lock(mRegistryLock);
4555 internalFrameNumber = _nextFreeInternalNumber++;
4556 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4557 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4558 purgeOldEntriesLocked();
4559 return NO_ERROR;
4560}
4561
4562/*===========================================================================
4563 * FUNCTION : getFrameworkFrameNumber
4564 *
4565 * DESCRIPTION: Method to query the framework frame number given an internal one
4566 *
4567 * PARAMETERS :
4568 * @internalFrameNumber : Internal reference
4569 * @frameworkFrameNumber: Output parameter holding the framework frame number
4570 *
4571 * RETURN : Error code
4572 *
4573 *==========================================================================*/
4574int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4575 uint32_t &frameworkFrameNumber)
4576{
4577 Mutex::Autolock lock(mRegistryLock);
4578 auto itr = _register.find(internalFrameNumber);
4579 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004580 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004581 return -ENOENT;
4582 }
4583
4584 frameworkFrameNumber = itr->second;
4585 purgeOldEntriesLocked();
4586 return NO_ERROR;
4587}
Thierry Strudel3d639192016-09-09 11:52:26 -07004588
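/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fill an HDR+ client (pbcamera) stream configuration from the
 *              stream info of the given channel's stream: dimensions, format,
 *              per-plane stride/scanline, and the padding remaining in the
 *              frame length after all planes.
 *
 * PARAMETERS :
 *   @config        : stream configuration to be filled
 *   @pbStreamId    : stream ID to assign in the configuration
 *   @pbStreamFormat: pbcamera stream format
 *   @channel       : channel owning the stream
 *   @streamIndex   : index of the stream within the channel
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/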
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004589status_t QCamera3HardwareInterface::fillPbStreamConfig(
4590 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4591 QCamera3Channel *channel, uint32_t streamIndex) {
4592 if (config == nullptr) {
4593 LOGE("%s: config is null", __FUNCTION__);
4594 return BAD_VALUE;
4595 }
4596
4597 if (channel == nullptr) {
4598 LOGE("%s: channel is null", __FUNCTION__);
4599 return BAD_VALUE;
4600 }
4601
4602 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4603 if (stream == nullptr) {
4604 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4605 return NAME_NOT_FOUND;
4606 }
4607
4608 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4609 if (streamInfo == nullptr) {
4610 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4611 return NAME_NOT_FOUND;
4612 }
4613
4614 config->id = pbStreamId;
4615 config->image.width = streamInfo->dim.width;
4616 config->image.height = streamInfo->dim.height;
4617 config->image.padding = 0;
4618 config->image.format = pbStreamFormat;
4619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004620 uint32_t totalPlaneSize = 0;
4621
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004622 // Fill plane information.
4623 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4624 pbcamera::PlaneConfiguration plane;
4625 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4626 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4627 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004628
4629 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004630 }
4631
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004633 return OK;
4634}
4635
Thierry Strudel3d639192016-09-09 11:52:26 -07004636/*===========================================================================
4637 * FUNCTION : processCaptureRequest
4638 *
4639 * DESCRIPTION: process a capture request from camera service
4640 *
4641 * PARAMETERS :
4642 * @request : request from framework to process
 * @internallyRequestedStreams : list of streams requested internally by HAL
4643 *
4644 * RETURN :
4645 *
4646 *==========================================================================*/
4647int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004648 camera3_capture_request_t *request,
4649 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004650{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004651 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004652 int rc = NO_ERROR;
4653 int32_t request_id;
4654 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004655 bool isVidBufRequested = false;
4656 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004657 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004658
4659 pthread_mutex_lock(&mMutex);
4660
4661 // Validate current state
4662 switch (mState) {
4663 case CONFIGURED:
4664 case STARTED:
4665 /* valid state */
4666 break;
4667
4668 case ERROR:
4669 pthread_mutex_unlock(&mMutex);
4670 handleCameraDeviceError();
4671 return -ENODEV;
4672
4673 default:
4674 LOGE("Invalid state %d", mState);
4675 pthread_mutex_unlock(&mMutex);
4676 return -ENODEV;
4677 }
4678
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004679 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004680 if (rc != NO_ERROR) {
4681 LOGE("incoming request is not valid");
4682 pthread_mutex_unlock(&mMutex);
4683 return rc;
4684 }
4685
4686 meta = request->settings;
4687
4688 // For first capture request, send capture intent, and
4689 // stream on all streams
4690 if (mState == CONFIGURED) {
4691 // send an unconfigure to the backend so that the isp
4692 // resources are deallocated
4693 if (!mFirstConfiguration) {
4694 cam_stream_size_info_t stream_config_info;
4695 int32_t hal_version = CAM_HAL_V3;
4696 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4697 stream_config_info.buffer_info.min_buffers =
4698 MIN_INFLIGHT_REQUESTS;
4699 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004700 m_bIs4KVideo ? 0 :
4701 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004702 clear_metadata_buffer(mParameters);
4703 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4704 CAM_INTF_PARM_HAL_VERSION, hal_version);
4705 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4706 CAM_INTF_META_STREAM_INFO, stream_config_info);
4707 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4708 mParameters);
4709 if (rc < 0) {
4710 LOGE("set_parms for unconfigure failed");
4711 pthread_mutex_unlock(&mMutex);
4712 return rc;
4713 }
4714 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004715 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004717 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004718 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004719 property_get("persist.camera.is_type", is_type_value, "4");
4720 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4721 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4722 property_get("persist.camera.is_type_preview", is_type_value, "4");
4723 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4724 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004725
4726 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4727 int32_t hal_version = CAM_HAL_V3;
4728 uint8_t captureIntent =
4729 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4730 mCaptureIntent = captureIntent;
4731 clear_metadata_buffer(mParameters);
4732 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4733 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4734 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004735 if (mFirstConfiguration) {
4736 // configure instant AEC
4737 // Instant AEC is a session based parameter and it is needed only
4738 // once per complete session after open camera.
4739 // i.e. This is set only once for the first capture request, after open camera.
4740 setInstantAEC(meta);
4741 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004742 uint8_t fwkVideoStabMode=0;
4743 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4744 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4745 }
4746
4747 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4748 // turn it on for video/preview
4749 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4750 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004751 int32_t vsMode;
4752 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4754 rc = BAD_VALUE;
4755 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004756 LOGD("setEis %d", setEis);
4757 bool eis3Supported = false;
4758 size_t count = IS_TYPE_MAX;
4759 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4760 for (size_t i = 0; i < count; i++) {
4761 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4762 eis3Supported = true;
4763 break;
4764 }
4765 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004766
4767 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004768 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4770 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004771 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4772 is_type = isTypePreview;
4773 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4774 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4775 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004777 } else {
4778 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004779 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004780 } else {
4781 is_type = IS_TYPE_NONE;
4782 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004784 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4786 }
4787 }
4788
4789 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4790 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4791
Thierry Strudel54dc9782017-02-15 12:12:10 -08004792 //Disable tintless only if the property is set to 0
4793 memset(prop, 0, sizeof(prop));
4794 property_get("persist.camera.tintless.enable", prop, "1");
4795 int32_t tintless_value = atoi(prop);
4796
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4798 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004799
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 //Disable CDS for HFR mode or if DIS/EIS is on.
4801 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4802 //after every configure_stream
4803 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4804 (m_bIsVideo)) {
4805 int32_t cds = CAM_CDS_MODE_OFF;
4806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4807 CAM_INTF_PARM_CDS_MODE, cds))
4808 LOGE("Failed to disable CDS for HFR mode");
4809
4810 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004811
4812 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4813 uint8_t* use_av_timer = NULL;
4814
4815 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004816 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 use_av_timer = &m_debug_avtimer;
4818 }
4819 else{
4820 use_av_timer =
4821 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004822 if (use_av_timer) {
4823 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4824 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004825 }
4826
4827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4828 rc = BAD_VALUE;
4829 }
4830 }
4831
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 setMobicat();
4833
4834 /* Set fps and hfr mode while sending meta stream info so that sensor
4835 * can configure appropriate streaming mode */
4836 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4838 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4840 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 if (rc == NO_ERROR) {
4842 int32_t max_fps =
4843 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004844 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4846 }
4847 /* For HFR, more buffers are dequeued upfront to improve the performance */
4848 if (mBatchSize) {
4849 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4850 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4851 }
4852 }
4853 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 LOGE("setHalFpsRange failed");
4855 }
4856 }
4857 if (meta.exists(ANDROID_CONTROL_MODE)) {
4858 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4859 rc = extractSceneMode(meta, metaMode, mParameters);
4860 if (rc != NO_ERROR) {
4861 LOGE("extractSceneMode failed");
4862 }
4863 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004864 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004865
Thierry Strudel04e026f2016-10-10 11:27:36 -07004866 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4867 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4868 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4869 rc = setVideoHdrMode(mParameters, vhdr);
4870 if (rc != NO_ERROR) {
4871 LOGE("setVideoHDR is failed");
4872 }
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 //TODO: validate the arguments, HSV scenemode should have only the
4876 //advertised fps ranges
4877
4878 /*set the capture intent, hal version, tintless, stream info,
4879 *and DIS enable parameters to the backend*/
4880 LOGD("set_parms META_STREAM_INFO " );
4881 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004882 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4883 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 mStreamConfigInfo.type[i],
4885 mStreamConfigInfo.stream_sizes[i].width,
4886 mStreamConfigInfo.stream_sizes[i].height,
4887 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 mStreamConfigInfo.format[i],
4889 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004890 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4893 mParameters);
4894 if (rc < 0) {
4895 LOGE("set_parms failed for hal version, stream info");
4896 }
4897
Chien-Yu Chenee335912017-02-09 17:53:20 -08004898 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4899 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 if (rc != NO_ERROR) {
4901 LOGE("Failed to get sensor output size");
4902 pthread_mutex_unlock(&mMutex);
4903 goto error_exit;
4904 }
4905
4906 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4907 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004908 mSensorModeInfo.active_array_size.width,
4909 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004911 if (gHdrPlusClient != nullptr) {
4912 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4913 if (rc != OK) {
4914 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4915 mCameraId, mSensorModeInfo.op_pixel_clk);
4916 pthread_mutex_unlock(&mMutex);
4917 goto error_exit;
4918 }
4919 }
4920
Thierry Strudel3d639192016-09-09 11:52:26 -07004921 /* Set batchmode before initializing channel. Since registerBuffer
4922 * internally initializes some of the channels, better set batchmode
4923 * even before first register buffer */
4924 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4925 it != mStreamInfo.end(); it++) {
4926 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4927 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4928 && mBatchSize) {
4929 rc = channel->setBatchSize(mBatchSize);
4930 //Disable per frame map unmap for HFR/batchmode case
4931 rc |= channel->setPerFrameMapUnmap(false);
4932 if (NO_ERROR != rc) {
4933 LOGE("Channel init failed %d", rc);
4934 pthread_mutex_unlock(&mMutex);
4935 goto error_exit;
4936 }
4937 }
4938 }
4939
4940 //First initialize all streams
4941 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4942 it != mStreamInfo.end(); it++) {
4943 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4944 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4945 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 setEis) {
4947 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4948 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4949 is_type = mStreamConfigInfo.is_type[i];
4950 break;
4951 }
4952 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004954 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 rc = channel->initialize(IS_TYPE_NONE);
4956 }
4957 if (NO_ERROR != rc) {
4958 LOGE("Channel initialization failed %d", rc);
4959 pthread_mutex_unlock(&mMutex);
4960 goto error_exit;
4961 }
4962 }
4963
4964 if (mRawDumpChannel) {
4965 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4966 if (rc != NO_ERROR) {
4967 LOGE("Error: Raw Dump Channel init failed");
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
4971 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004972 if (mHdrPlusRawSrcChannel) {
4973 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4974 if (rc != NO_ERROR) {
4975 LOGE("Error: HDR+ RAW Source Channel init failed");
4976 pthread_mutex_unlock(&mMutex);
4977 goto error_exit;
4978 }
4979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 if (mSupportChannel) {
4981 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4982 if (rc < 0) {
4983 LOGE("Support channel initialization failed");
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
4988 if (mAnalysisChannel) {
4989 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4990 if (rc < 0) {
4991 LOGE("Analysis channel initialization failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
4995 }
4996 if (mDummyBatchChannel) {
4997 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4998 if (rc < 0) {
4999 LOGE("mDummyBatchChannel setBatchSize failed");
5000 pthread_mutex_unlock(&mMutex);
5001 goto error_exit;
5002 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005003 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (rc < 0) {
5005 LOGE("mDummyBatchChannel initialization failed");
5006 pthread_mutex_unlock(&mMutex);
5007 goto error_exit;
5008 }
5009 }
5010
5011 // Set bundle info
5012 rc = setBundleInfo();
5013 if (rc < 0) {
5014 LOGE("setBundleInfo failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018
5019 //update settings from app here
5020 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5021 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5022 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5023 }
5024 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5025 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5026 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5027 }
5028 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5029 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5030 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5031
5032 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5033 (mLinkedCameraId != mCameraId) ) {
5034 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5035 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 goto error_exit;
5038 }
5039 }
5040
5041 // add bundle related cameras
5042 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5043 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005044 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5045 &m_pDualCamCmdPtr->bundle_info;
5046 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 if (mIsDeviceLinked)
5048 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5049 else
5050 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5051
5052 pthread_mutex_lock(&gCamLock);
5053
5054 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5055 LOGE("Dualcam: Invalid Session Id ");
5056 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 goto error_exit;
5059 }
5060
5061 if (mIsMainCamera == 1) {
5062 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5063 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005064 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005065 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 // related session id should be session id of linked session
5067 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5068 } else {
5069 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5070 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005071 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005072 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5074 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005075 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 pthread_mutex_unlock(&gCamLock);
5077
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005078 rc = mCameraHandle->ops->set_dual_cam_cmd(
5079 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 if (rc < 0) {
5081 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005083 goto error_exit;
5084 }
5085 }
5086
5087 //Then start them.
5088 LOGH("Start META Channel");
5089 rc = mMetadataChannel->start();
5090 if (rc < 0) {
5091 LOGE("META channel start failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095
5096 if (mAnalysisChannel) {
5097 rc = mAnalysisChannel->start();
5098 if (rc < 0) {
5099 LOGE("Analysis channel start failed");
5100 mMetadataChannel->stop();
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105
5106 if (mSupportChannel) {
5107 rc = mSupportChannel->start();
5108 if (rc < 0) {
5109 LOGE("Support channel start failed");
5110 mMetadataChannel->stop();
5111 /* Although support and analysis are mutually exclusive today
5112 adding it in any case for future proofing */
5113 if (mAnalysisChannel) {
5114 mAnalysisChannel->stop();
5115 }
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
5120 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5121 it != mStreamInfo.end(); it++) {
5122 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5123 LOGH("Start Processing Channel mask=%d",
5124 channel->getStreamTypeMask());
5125 rc = channel->start();
5126 if (rc < 0) {
5127 LOGE("channel start failed");
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 if (mRawDumpChannel) {
5134 LOGD("Starting raw dump stream");
5135 rc = mRawDumpChannel->start();
5136 if (rc != NO_ERROR) {
5137 LOGE("Error Starting Raw Dump Channel");
5138 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5139 it != mStreamInfo.end(); it++) {
5140 QCamera3Channel *channel =
5141 (QCamera3Channel *)(*it)->stream->priv;
5142 LOGH("Stopping Processing Channel mask=%d",
5143 channel->getStreamTypeMask());
5144 channel->stop();
5145 }
5146 if (mSupportChannel)
5147 mSupportChannel->stop();
5148 if (mAnalysisChannel) {
5149 mAnalysisChannel->stop();
5150 }
5151 mMetadataChannel->stop();
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156
5157 if (mChannelHandle) {
5158
5159 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5160 mChannelHandle);
5161 if (rc != NO_ERROR) {
5162 LOGE("start_channel failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166 }
5167
5168 goto no_error;
5169error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005170 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 return rc;
5172no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 mWokenUpByDaemon = false;
5174 mPendingLiveRequest = 0;
5175 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 }
5177
Chien-Yu Chenee335912017-02-09 17:53:20 -08005178 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005179 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005180 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5181 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5182 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5183 rc = enableHdrPlusModeLocked();
5184 if (rc != OK) {
5185 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5186 pthread_mutex_unlock(&mMutex);
5187 return rc;
5188 }
5189
5190 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5191 if (mHdrPlusRawSrcChannel) {
5192 rc = mHdrPlusRawSrcChannel->start();
5193 if (rc != OK) {
5194 LOGE("Error Starting HDR+ RAW Channel");
5195 pthread_mutex_unlock(&mMutex);
5196 return rc;
5197 }
5198 }
5199 mFirstPreviewIntentSeen = true;
5200 }
5201
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 uint32_t frameNumber = request->frame_number;
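    /* streamsArray collects the IDs (and buffer indices) of the streams
     * requested in this capture; it is passed to setFrameParameters below. */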
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005203 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204
5205 if (mFlushPerf) {
5206 //we cannot accept any requests during flush
5207 LOGE("process_capture_request cannot proceed during flush");
5208 pthread_mutex_unlock(&mMutex);
5209 return NO_ERROR; //should return an error
5210 }
5211
5212 if (meta.exists(ANDROID_REQUEST_ID)) {
5213 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5214 mCurrentRequestId = request_id;
5215 LOGD("Received request with id: %d", request_id);
5216 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5217 LOGE("Unable to find request id field, \
5218 & no previous id available");
5219 pthread_mutex_unlock(&mMutex);
5220 return NAME_NOT_FOUND;
5221 } else {
5222 LOGD("Re-using old request id");
5223 request_id = mCurrentRequestId;
5224 }
5225
5226 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5227 request->num_output_buffers,
5228 request->input_buffer,
5229 frameNumber);
5230 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005233 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 uint32_t snapshotStreamId = 0;
5235 for (size_t i = 0; i < request->num_output_buffers; i++) {
5236 const camera3_stream_buffer_t& output = request->output_buffers[i];
5237 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5238
Emilian Peev7650c122017-01-19 08:24:33 -08005239 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5240 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005241 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 blob_request = 1;
5243 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5244 }
5245
5246 if (output.acquire_fence != -1) {
5247 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5248 close(output.acquire_fence);
5249 if (rc != OK) {
5250 LOGE("sync wait failed %d", rc);
5251 pthread_mutex_unlock(&mMutex);
5252 return rc;
5253 }
5254 }
5255
Emilian Peev0f3c3162017-03-15 12:57:46 +00005256 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5257 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005258 depthRequestPresent = true;
5259 continue;
5260 }
5261
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005264
5265 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5266 isVidBufRequested = true;
5267 }
5268 }
5269
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005270 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5271 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5272 itr++) {
5273 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5274 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5275 channel->getStreamID(channel->getStreamTypeMask());
5276
5277 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5278 isVidBufRequested = true;
5279 }
5280 }
5281
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005284 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 }
5286 if (blob_request && mRawDumpChannel) {
5287 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005288 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005289 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 }
5292
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293 {
5294 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5295 // Request a RAW buffer if
5296 // 1. mHdrPlusRawSrcChannel is valid.
5297 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5298 // 3. There is no pending HDR+ request.
5299 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5300 mHdrPlusPendingRequests.size() == 0) {
5301 streamsArray.stream_request[streamsArray.num_streams].streamID =
5302 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5303 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5304 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005305 }
5306
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005307 //extract capture intent
5308 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5309 mCaptureIntent =
5310 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5311 }
5312
5313 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5314 mCacMode =
5315 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5316 }
5317
5318 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005319 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005321 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005322 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005323 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5324 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005325 }
5326
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005327 if (hdrPlusRequest) {
5328 // For a HDR+ request, just set the frame parameters.
5329 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5330 if (rc < 0) {
5331 LOGE("fail to set frame parameters");
5332 pthread_mutex_unlock(&mMutex);
5333 return rc;
5334 }
5335 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 /* Parse the settings:
5337 * - For every request in NORMAL MODE
5338 * - For every request in HFR mode during preview only case
5339 * - For first request of every batch in HFR mode during video
5340 * recording. In batchmode the same settings except frame number is
5341 * repeated in each request of the batch.
5342 */
5343 if (!mBatchSize ||
5344 (mBatchSize && !isVidBufRequested) ||
5345 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 if (rc < 0) {
5348 LOGE("fail to set frame parameters");
5349 pthread_mutex_unlock(&mMutex);
5350 return rc;
5351 }
5352 }
5353 /* For batch-mode HFR, setFrameParameters is not called for every
5354 * request; only the frame number of the latest request is parsed.
5355 * Keep track of the first and last frame numbers in a batch so that
5356 * metadata for the frame numbers of the batch can be duplicated in
5357 * handleBatchMetadata */
5358 if (mBatchSize) {
5359 if (!mToBeQueuedVidBufs) {
5360 //start of the batch
5361 mFirstFrameNumberInBatch = request->frame_number;
5362 }
5363 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5364 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5365 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005366 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 return BAD_VALUE;
5368 }
5369 }
5370 if (mNeedSensorRestart) {
5371 /* Unlock the mutex as restartSensor waits on the channels to be
5372 * stopped, which in turn calls stream callback functions -
5373 * handleBufferWithLock and handleMetadataWithLock */
5374 pthread_mutex_unlock(&mMutex);
5375 rc = dynamicUpdateMetaStreamInfo();
5376 if (rc != NO_ERROR) {
5377 LOGE("Restarting the sensor failed");
5378 return BAD_VALUE;
5379 }
5380 mNeedSensorRestart = false;
5381 pthread_mutex_lock(&mMutex);
5382 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005383 if(mResetInstantAEC) {
5384 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5385 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5386 mResetInstantAEC = false;
5387 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005388 } else {
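// Reprocess (input buffer) request: wait for the input buffer's acquire
// fence to signal before the buffer is consumed.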
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 if (request->input_buffer->acquire_fence != -1) {
5390 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5391 close(request->input_buffer->acquire_fence);
5392 if (rc != OK) {
5393 LOGE("input buffer sync wait failed %d", rc);
5394 pthread_mutex_unlock(&mMutex);
5395 return rc;
5396 }
5397 }
5398 }
5399
5400 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5401 mLastCustIntentFrmNum = frameNumber;
5402 }
5403 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005404 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 pendingRequestIterator latestRequest;
5406 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005407 pendingRequest.num_buffers = depthRequestPresent ?
5408 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 pendingRequest.request_id = request_id;
5410 pendingRequest.blob_request = blob_request;
5411 pendingRequest.timestamp = 0;
5412 pendingRequest.bUrgentReceived = 0;
5413 if (request->input_buffer) {
5414 pendingRequest.input_buffer =
5415 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5416 *(pendingRequest.input_buffer) = *(request->input_buffer);
5417 pInputBuffer = pendingRequest.input_buffer;
5418 } else {
5419 pendingRequest.input_buffer = NULL;
5420 pInputBuffer = NULL;
5421 }
5422
5423 pendingRequest.pipeline_depth = 0;
5424 pendingRequest.partial_result_cnt = 0;
5425 extractJpegMetadata(mCurJpegMeta, request);
5426 pendingRequest.jpegMetadata = mCurJpegMeta;
5427 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5428 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005430 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5431 mHybridAeEnable =
5432 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5433 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005434
5435 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5436 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005437 /* DevCamDebug metadata processCaptureRequest */
5438 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5439 mDevCamDebugMetaEnable =
5440 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5441 }
5442 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5443 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005444
5445 //extract CAC info
5446 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5447 mCacMode =
5448 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5449 }
5450 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005451 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452
5453 PendingBuffersInRequest bufsForCurRequest;
5454 bufsForCurRequest.frame_number = frameNumber;
5455 // Mark current timestamp for the new request
5456 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005458
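// For an HDR+ request, snapshot the current HAL parameters; mParameters is
// reused by later requests, so a per-request copy is kept with the pending
// HDR+ request until its capture result is processed.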
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005459 if (hdrPlusRequest) {
5460 // Save settings for this request.
5461 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5462 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5463
5464 // Add to pending HDR+ request queue.
5465 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5466 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5467
5468 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5469 }
5470
Thierry Strudel3d639192016-09-09 11:52:26 -07005471 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005472 if ((request->output_buffers[i].stream->data_space ==
5473 HAL_DATASPACE_DEPTH) &&
5474 (HAL_PIXEL_FORMAT_BLOB ==
5475 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005476 continue;
5477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005478 RequestedBufferInfo requestedBuf;
5479 memset(&requestedBuf, 0, sizeof(requestedBuf));
5480 requestedBuf.stream = request->output_buffers[i].stream;
5481 requestedBuf.buffer = NULL;
5482 pendingRequest.buffers.push_back(requestedBuf);
5483
5484 // Add the buffer handle to the pending buffers list
5485 PendingBufferInfo bufferInfo;
5486 bufferInfo.buffer = request->output_buffers[i].buffer;
5487 bufferInfo.stream = request->output_buffers[i].stream;
5488 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5489 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5490 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5491 frameNumber, bufferInfo.buffer,
5492 channel->getStreamTypeMask(), bufferInfo.stream->format);
5493 }
5494 // Add this request packet into mPendingBuffersMap
5495 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5496 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5497 mPendingBuffersMap.get_num_overall_buffers());
5498
5499 latestRequest = mPendingRequestsList.insert(
5500 mPendingRequestsList.end(), pendingRequest);
5501 if(mFlush) {
5502 LOGI("mFlush is true");
5503 pthread_mutex_unlock(&mMutex);
5504 return NO_ERROR;
5505 }
5506
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005507 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5508 // channel.
5509 if (!hdrPlusRequest) {
5510 int indexUsed;
5511 // Notify metadata channel we receive a request
5512 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005513
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005514 if(request->input_buffer != NULL){
5515 LOGD("Input request, frame_number %d", frameNumber);
5516 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5517 if (NO_ERROR != rc) {
5518 LOGE("fail to set reproc parameters");
5519 pthread_mutex_unlock(&mMutex);
5520 return rc;
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 }
5523
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005524 // Call request on other streams
5525 uint32_t streams_need_metadata = 0;
5526 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5527 for (size_t i = 0; i < request->num_output_buffers; i++) {
5528 const camera3_stream_buffer_t& output = request->output_buffers[i];
5529 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5530
5531 if (channel == NULL) {
5532 LOGW("invalid channel pointer for stream");
5533 continue;
5534 }
5535
5536 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5537 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5538 output.buffer, request->input_buffer, frameNumber);
5539 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005540 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5542 if (rc < 0) {
5543 LOGE("Fail to request on picture channel");
5544 pthread_mutex_unlock(&mMutex);
5545 return rc;
5546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005548 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5549 assert(NULL != mDepthChannel);
5550 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551
Emilian Peev7650c122017-01-19 08:24:33 -08005552 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5553 if (rc < 0) {
5554 LOGE("Fail to map on depth buffer");
5555 pthread_mutex_unlock(&mMutex);
5556 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 }
Emilian Peev7650c122017-01-19 08:24:33 -08005558 } else {
5559 LOGD("snapshot request with buffer %p, frame_number %d",
5560 output.buffer, frameNumber);
5561 if (!request->settings) {
5562 rc = channel->request(output.buffer, frameNumber,
5563 NULL, mPrevParameters, indexUsed);
5564 } else {
5565 rc = channel->request(output.buffer, frameNumber,
5566 NULL, mParameters, indexUsed);
5567 }
5568 if (rc < 0) {
5569 LOGE("Fail to request on picture channel");
5570 pthread_mutex_unlock(&mMutex);
5571 return rc;
5572 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573
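// Record the buffer index the channel assigned for this stream in
// streamsArray so it can be passed to the backend; constrained high-speed
// mode uses free-running buffer indices instead.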
Emilian Peev7650c122017-01-19 08:24:33 -08005574 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5575 uint32_t j = 0;
5576 for (j = 0; j < streamsArray.num_streams; j++) {
5577 if (streamsArray.stream_request[j].streamID == streamId) {
5578 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5579 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5580 else
5581 streamsArray.stream_request[j].buf_index = indexUsed;
5582 break;
5583 }
5584 }
5585 if (j == streamsArray.num_streams) {
5586 LOGE("Did not find matching stream to update index");
5587 assert(0);
5588 }
5589
5590 pendingBufferIter->need_metadata = true;
5591 streams_need_metadata++;
5592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005593 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005594 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5595 bool needMetadata = false;
5596 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5597 rc = yuvChannel->request(output.buffer, frameNumber,
5598 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5599 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 pthread_mutex_unlock(&mMutex);
5603 return rc;
5604 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005605
5606 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5607 uint32_t j = 0;
5608 for (j = 0; j < streamsArray.num_streams; j++) {
5609 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5611 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5612 else
5613 streamsArray.stream_request[j].buf_index = indexUsed;
5614 break;
5615 }
5616 }
5617 if (j == streamsArray.num_streams) {
5618 LOGE("Did not find matching stream to update index");
5619 assert(0);
5620 }
5621
5622 pendingBufferIter->need_metadata = needMetadata;
5623 if (needMetadata)
5624 streams_need_metadata += 1;
5625 LOGD("calling YUV channel request, need_metadata is %d",
5626 needMetadata);
5627 } else {
5628 LOGD("request with buffer %p, frame_number %d",
5629 output.buffer, frameNumber);
5630
5631 rc = channel->request(output.buffer, frameNumber, indexUsed);
5632
5633 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5634 uint32_t j = 0;
5635 for (j = 0; j < streamsArray.num_streams; j++) {
5636 if (streamsArray.stream_request[j].streamID == streamId) {
5637 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5638 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5639 else
5640 streamsArray.stream_request[j].buf_index = indexUsed;
5641 break;
5642 }
5643 }
5644 if (j == streamsArray.num_streams) {
5645 LOGE("Did not find matching stream to update index");
5646 assert(0);
5647 }
5648
5649 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5650 && mBatchSize) {
5651 mToBeQueuedVidBufs++;
5652 if (mToBeQueuedVidBufs == mBatchSize) {
5653 channel->queueBatchBuf();
5654 }
5655 }
5656 if (rc < 0) {
5657 LOGE("request failed");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
5660 }
5661 }
5662 pendingBufferIter++;
5663 }
5664
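// Issue requests for streams the HAL requested internally (e.g. metering-only
// captures); only BLOB streams are supported on this path.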
5665 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5666 itr++) {
5667 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5668
5669 if (channel == NULL) {
5670 LOGE("invalid channel pointer for stream");
5671 assert(0);
5672 return BAD_VALUE;
5673 }
5674
5675 InternalRequest requestedStream;
5676 requestedStream = (*itr);
5677
5678
5679 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5680 LOGD("snapshot request internally input buffer %p, frame_number %d",
5681 request->input_buffer, frameNumber);
5682 if(request->input_buffer != NULL){
5683 rc = channel->request(NULL, frameNumber,
5684 pInputBuffer, &mReprocMeta, indexUsed, true,
5685 requestedStream.meteringOnly);
5686 if (rc < 0) {
5687 LOGE("Fail to request on picture channel");
5688 pthread_mutex_unlock(&mMutex);
5689 return rc;
5690 }
5691 } else {
5692 LOGD("snapshot request with frame_number %d", frameNumber);
5693 if (!request->settings) {
5694 rc = channel->request(NULL, frameNumber,
5695 NULL, mPrevParameters, indexUsed, true,
5696 requestedStream.meteringOnly);
5697 } else {
5698 rc = channel->request(NULL, frameNumber,
5699 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5700 }
5701 if (rc < 0) {
5702 LOGE("Fail to request on picture channel");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
5706
5707 if ((*itr).meteringOnly != 1) {
5708 requestedStream.need_metadata = 1;
5709 streams_need_metadata++;
5710 }
5711 }
5712
5713 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5714 uint32_t j = 0;
5715 for (j = 0; j < streamsArray.num_streams; j++) {
5716 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005717 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5718 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5719 else
5720 streamsArray.stream_request[j].buf_index = indexUsed;
5721 break;
5722 }
5723 }
5724 if (j == streamsArray.num_streams) {
5725 LOGE("Did not find matching stream to update index");
5726 assert(0);
5727 }
5728
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005729 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005731 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005732 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005734 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005735 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005736
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005737 //If 2 streams have need_metadata set to true, fail the request, unless
5738 //we copy/reference count the metadata buffer
5739 if (streams_need_metadata > 1) {
5740 LOGE("not supporting request in which two streams requires"
5741 " 2 HAL metadata for reprocessing");
5742 pthread_mutex_unlock(&mMutex);
5743 return -EINVAL;
5744 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005745
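// Enable PDAF data in the backend only for requests that include a depth
// output buffer, since the depth stream is generated from PDAF samples.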
Emilian Peev7650c122017-01-19 08:24:33 -08005746 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5748 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5749 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5750 pthread_mutex_unlock(&mMutex);
5751 return BAD_VALUE;
5752 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 if (request->input_buffer == NULL) {
5754 /* Set the parameters to backend:
5755 * - For every request in NORMAL MODE
5756 * - For every request in HFR mode during preview only case
5757 * - Once every batch in HFR mode during video recording
5758 */
5759 if (!mBatchSize ||
5760 (mBatchSize && !isVidBufRequested) ||
5761 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5762 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5763 mBatchSize, isVidBufRequested,
5764 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765
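// In HFR batch mode, accumulate the union of stream IDs requested across the
// batch into mBatchedStreamsArray; the merged list is sent to the backend once
// per batch rather than once per request.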
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005766 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5767 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5768 uint32_t m = 0;
5769 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5770 if (streamsArray.stream_request[k].streamID ==
5771 mBatchedStreamsArray.stream_request[m].streamID)
5772 break;
5773 }
5774 if (m == mBatchedStreamsArray.num_streams) {
5775 mBatchedStreamsArray.stream_request\
5776 [mBatchedStreamsArray.num_streams].streamID =
5777 streamsArray.stream_request[k].streamID;
5778 mBatchedStreamsArray.stream_request\
5779 [mBatchedStreamsArray.num_streams].buf_index =
5780 streamsArray.stream_request[k].buf_index;
5781 mBatchedStreamsArray.num_streams =
5782 mBatchedStreamsArray.num_streams + 1;
5783 }
5784 }
5785 streamsArray = mBatchedStreamsArray;
5786 }
5787 /* Update stream id of all the requested buffers */
5788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5789 streamsArray)) {
5790 LOGE("Failed to set stream type mask in the parameters");
5791 return BAD_VALUE;
5792 }
5793
5794 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5795 mParameters);
5796 if (rc < 0) {
5797 LOGE("set_parms failed");
5798 }
5799 /* reset to zero because the batch is queued */
5800 mToBeQueuedVidBufs = 0;
5801 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5802 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5803 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005804 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5805 uint32_t m = 0;
5806 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5807 if (streamsArray.stream_request[k].streamID ==
5808 mBatchedStreamsArray.stream_request[m].streamID)
5809 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005810 }
5811 if (m == mBatchedStreamsArray.num_streams) {
5812 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5813 streamID = streamsArray.stream_request[k].streamID;
5814 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5815 buf_index = streamsArray.stream_request[k].buf_index;
5816 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5817 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005818 }
5819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005821 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005822 }
5823
5824 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5825
5826 mState = STARTED;
5827 // Set up a timed condition wait
5828 struct timespec ts;
5829 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005830 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005831 if (rc < 0) {
5832 isValidTimeout = 0;
5833 LOGE("Error reading the real time clock!!");
5834 }
5835 else {
5836 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 int64_t timeout = 5;
5838 {
5839 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5840 // If there is a pending HDR+ request, the following requests may be blocked until the
5841 // HDR+ request is done. So allow a longer timeout.
5842 if (mHdrPlusPendingRequests.size() > 0) {
5843 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5844 }
5845 }
5846 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005847 }
5848 // Block on the condition variable
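// This wait throttles process_capture_request: it blocks while the number of
// in-flight requests is at or above mMinInFlightRequests; reprocess requests
// (with an input buffer) are not throttled.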
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005849 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005850 (mState != ERROR) && (mState != DEINIT)) {
5851 if (!isValidTimeout) {
5852 LOGD("Blocking on conditional wait");
5853 pthread_cond_wait(&mRequestCond, &mMutex);
5854 }
5855 else {
5856 LOGD("Blocking on timed conditional wait");
5857 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5858 if (rc == ETIMEDOUT) {
5859 rc = -ENODEV;
5860 LOGE("Unblocked on timeout!!!!");
5861 break;
5862 }
5863 }
5864 LOGD("Unblocked");
5865 if (mWokenUpByDaemon) {
5866 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005867 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005868 break;
5869 }
5870 }
5871 pthread_mutex_unlock(&mMutex);
5872
5873 return rc;
5874}
5875
5876/*===========================================================================
5877 * FUNCTION : dump
5878 *
5879 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and pending
5880 *              frame drops) to the given file descriptor
5881 * PARAMETERS :
5882 *   @fd : file descriptor to write the dump to
5883 *
5884 * RETURN     : None
5885 *==========================================================================*/
5886void QCamera3HardwareInterface::dump(int fd)
5887{
5888 pthread_mutex_lock(&mMutex);
5889 dprintf(fd, "\n Camera HAL3 information Begin \n");
5890
5891 dprintf(fd, "\nNumber of pending requests: %zu \n",
5892 mPendingRequestsList.size());
5893 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5894 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5895 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5896 for(pendingRequestIterator i = mPendingRequestsList.begin();
5897 i != mPendingRequestsList.end(); i++) {
5898 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5899 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5900 i->input_buffer);
5901 }
5902 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5903 mPendingBuffersMap.get_num_overall_buffers());
5904 dprintf(fd, "-------+------------------\n");
5905 dprintf(fd, " Frame | Stream type mask \n");
5906 dprintf(fd, "-------+------------------\n");
5907 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5908 for(auto &j : req.mPendingBufferList) {
5909 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5910 dprintf(fd, " %5d | %11d \n",
5911 req.frame_number, channel->getStreamTypeMask());
5912 }
5913 }
5914 dprintf(fd, "-------+------------------\n");
5915
5916 dprintf(fd, "\nPending frame drop list: %zu\n",
5917 mPendingFrameDropList.size());
5918 dprintf(fd, "-------+-----------\n");
5919 dprintf(fd, " Frame | Stream ID \n");
5920 dprintf(fd, "-------+-----------\n");
5921 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5922 i != mPendingFrameDropList.end(); i++) {
5923 dprintf(fd, " %5d | %9d \n",
5924 i->frame_number, i->stream_ID);
5925 }
5926 dprintf(fd, "-------+-----------\n");
5927
5928 dprintf(fd, "\n Camera HAL3 information End \n");
5929
5930 /* use dumpsys media.camera as trigger to send update debug level event */
5931 mUpdateDebugLevel = true;
5932 pthread_mutex_unlock(&mMutex);
5933 return;
5934}
5935
5936/*===========================================================================
5937 * FUNCTION : flush
5938 *
5939 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5940 * conditionally restarts channels
5941 *
5942 * PARAMETERS :
5943 * @restartChannels: whether to re-start all channels after the flush
5944 *
5945 *
5946 * RETURN :
5947 * 0 on success
5948 * Error code on failure
5949 *==========================================================================*/
5950int QCamera3HardwareInterface::flush(bool restartChannels)
5951{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005952 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005953 int32_t rc = NO_ERROR;
5954
5955 LOGD("Unblocking Process Capture Request");
5956 pthread_mutex_lock(&mMutex);
5957 mFlush = true;
5958 pthread_mutex_unlock(&mMutex);
5959
5960 rc = stopAllChannels();
5961 // unlink of dualcam
5962 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005963 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5964 &m_pDualCamCmdPtr->bundle_info;
5965 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005966 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5967 pthread_mutex_lock(&gCamLock);
5968
5969 if (mIsMainCamera == 1) {
5970 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5971 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005972 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005973 // related session id should be session id of linked session
5974 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5975 } else {
5976 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5977 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005978 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005979 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5980 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005981 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005982 pthread_mutex_unlock(&gCamLock);
5983
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005984 rc = mCameraHandle->ops->set_dual_cam_cmd(
5985 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005986 if (rc < 0) {
5987 LOGE("Dualcam: Unlink failed, but still proceed to close");
5988 }
5989 }
5990
5991 if (rc < 0) {
5992 LOGE("stopAllChannels failed");
5993 return rc;
5994 }
5995 if (mChannelHandle) {
5996 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5997 mChannelHandle);
5998 }
5999
6000 // Reset bundle info
6001 rc = setBundleInfo();
6002 if (rc < 0) {
6003 LOGE("setBundleInfo failed %d", rc);
6004 return rc;
6005 }
6006
6007 // Mutex Lock
6008 pthread_mutex_lock(&mMutex);
6009
6010 // Unblock process_capture_request
6011 mPendingLiveRequest = 0;
6012 pthread_cond_signal(&mRequestCond);
6013
6014 rc = notifyErrorForPendingRequests();
6015 if (rc < 0) {
6016 LOGE("notifyErrorForPendingRequests failed");
6017 pthread_mutex_unlock(&mMutex);
6018 return rc;
6019 }
6020
6021 mFlush = false;
6022
6023 // Start the Streams/Channels
6024 if (restartChannels) {
6025 rc = startAllChannels();
6026 if (rc < 0) {
6027 LOGE("startAllChannels failed");
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006031 if (mChannelHandle) {
6032 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6033 mChannelHandle);
6034 if (rc < 0) {
6035 LOGE("start_channel failed");
6036 pthread_mutex_unlock(&mMutex);
6037 return rc;
6038 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 }
6040 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 pthread_mutex_unlock(&mMutex);
6042
6043 return 0;
6044}
6045
6046/*===========================================================================
6047 * FUNCTION : flushPerf
6048 *
6049 * DESCRIPTION: Performance-optimized version of flush that does not stop the
6050 * streams; instead it flushes the backend and waits for pending buffers to return
6051 *
6052 * PARAMETERS :
6053 *
6054 *
6055 * RETURN : 0 : success
6056 * -EINVAL: input is malformed (device is not valid)
6057 * -ENODEV: if the device has encountered a serious error
6058 *==========================================================================*/
6059int QCamera3HardwareInterface::flushPerf()
6060{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006061 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 int32_t rc = 0;
6063 struct timespec timeout;
6064 bool timed_wait = false;
6065
6066 pthread_mutex_lock(&mMutex);
6067 mFlushPerf = true;
6068 mPendingBuffersMap.numPendingBufsAtFlush =
6069 mPendingBuffersMap.get_num_overall_buffers();
6070 LOGD("Calling flush. Wait for %d buffers to return",
6071 mPendingBuffersMap.numPendingBufsAtFlush);
6072
6073 /* send the flush event to the backend */
6074 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6075 if (rc < 0) {
6076 LOGE("Error in flush: IOCTL failure");
6077 mFlushPerf = false;
6078 pthread_mutex_unlock(&mMutex);
6079 return -ENODEV;
6080 }
6081
6082 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6083 LOGD("No pending buffers in HAL, return flush");
6084 mFlushPerf = false;
6085 pthread_mutex_unlock(&mMutex);
6086 return rc;
6087 }
6088
6089 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006090 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006091 if (rc < 0) {
6092 LOGE("Error reading the real time clock, cannot use timed wait");
6093 } else {
6094 timeout.tv_sec += FLUSH_TIMEOUT;
6095 timed_wait = true;
6096 }
6097
6098 // Block on the condition variable
6099 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6100 LOGD("Waiting on mBuffersCond");
6101 if (!timed_wait) {
6102 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6103 if (rc != 0) {
6104 LOGE("pthread_cond_wait failed due to rc = %s",
6105 strerror(rc));
6106 break;
6107 }
6108 } else {
6109 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6110 if (rc != 0) {
6111 LOGE("pthread_cond_timedwait failed due to rc = %s",
6112 strerror(rc));
6113 break;
6114 }
6115 }
6116 }
6117 if (rc != 0) {
6118 mFlushPerf = false;
6119 pthread_mutex_unlock(&mMutex);
6120 return -ENODEV;
6121 }
6122
6123 LOGD("Received buffers, now safe to return them");
6124
6125 //make sure the channels handle flush
6126 //currently only required for the picture channel to release snapshot resources
6127 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6128 it != mStreamInfo.end(); it++) {
6129 QCamera3Channel *channel = (*it)->channel;
6130 if (channel) {
6131 rc = channel->flush();
6132 if (rc) {
6133 LOGE("Flushing the channels failed with error %d", rc);
6134 // Even though the channel flush failed, we need to continue and
6135 // return the buffers we have to the framework; however, the return
6136 // value will be an error
6137 rc = -ENODEV;
6138 }
6139 }
6140 }
6141
6142 /* notify the frameworks and send errored results */
6143 rc = notifyErrorForPendingRequests();
6144 if (rc < 0) {
6145 LOGE("notifyErrorForPendingRequests failed");
6146 pthread_mutex_unlock(&mMutex);
6147 return rc;
6148 }
6149
6150 //unblock process_capture_request
6151 mPendingLiveRequest = 0;
6152 unblockRequestIfNecessary();
6153
6154 mFlushPerf = false;
6155 pthread_mutex_unlock(&mMutex);
6156 LOGD ("Flush Operation complete. rc = %d", rc);
6157 return rc;
6158}
6159
6160/*===========================================================================
6161 * FUNCTION : handleCameraDeviceError
6162 *
6163 * DESCRIPTION: Performs an internal flush, notifies the framework of the
6164 * device error and updates the state variable.
6165 *
6166 * PARAMETERS : None
6167 *
6168 * RETURN : NO_ERROR on Success
6169 * Error code on failure
6170 *==========================================================================*/
6171int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6172{
6173 int32_t rc = NO_ERROR;
6174
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006175 {
6176 Mutex::Autolock lock(mFlushLock);
6177 pthread_mutex_lock(&mMutex);
6178 if (mState != ERROR) {
6179 //if mState != ERROR, nothing to be done
6180 pthread_mutex_unlock(&mMutex);
6181 return NO_ERROR;
6182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006184
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006185 rc = flush(false /* restart channels */);
6186 if (NO_ERROR != rc) {
6187 LOGE("internal flush to handle mState = ERROR failed");
6188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006190 pthread_mutex_lock(&mMutex);
6191 mState = DEINIT;
6192 pthread_mutex_unlock(&mMutex);
6193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006194
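// Notify the framework of the fatal device error (CAMERA3_MSG_ERROR_DEVICE)
// so it can close the camera device.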
6195 camera3_notify_msg_t notify_msg;
6196 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6197 notify_msg.type = CAMERA3_MSG_ERROR;
6198 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6199 notify_msg.message.error.error_stream = NULL;
6200 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006201 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006202
6203 return rc;
6204}
6205
6206/*===========================================================================
6207 * FUNCTION : captureResultCb
6208 *
6209 * DESCRIPTION: Callback handler for all capture result
6210 * (streams, as well as metadata)
6211 *
6212 * PARAMETERS :
6213 * @metadata : metadata information
6214 * @buffer : actual gralloc buffer to be returned to frameworks.
6215 * NULL if metadata.
6216 *
6217 * RETURN : NONE
6218 *==========================================================================*/
6219void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6220 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6221{
6222 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006223 pthread_mutex_lock(&mMutex);
6224 uint8_t batchSize = mBatchSize;
6225 pthread_mutex_unlock(&mMutex);
6226 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 handleBatchMetadata(metadata_buf,
6228 true /* free_and_bufdone_meta_buf */);
6229 } else { /* mBatchSize = 0 */
6230 hdrPlusPerfLock(metadata_buf);
6231 pthread_mutex_lock(&mMutex);
6232 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006233 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006234 true /* last urgent frame of batch metadata */,
6235 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006236 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 pthread_mutex_unlock(&mMutex);
6238 }
6239 } else if (isInputBuffer) {
6240 pthread_mutex_lock(&mMutex);
6241 handleInputBufferWithLock(frame_number);
6242 pthread_mutex_unlock(&mMutex);
6243 } else {
6244 pthread_mutex_lock(&mMutex);
6245 handleBufferWithLock(buffer, frame_number);
6246 pthread_mutex_unlock(&mMutex);
6247 }
6248 return;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION : getReprocessibleOutputStreamId
6253 *
6254 * DESCRIPTION: Get source output stream id for the input reprocess stream
6255 * based on size and format, which would be the largest
6256 * output stream if an input stream exists.
6257 *
6258 * PARAMETERS :
6259 * @id : return the stream id if found
6260 *
6261 * RETURN : int32_t type of status
6262 * NO_ERROR -- success
6263 * non-zero failure code
6264 *==========================================================================*/
6265int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6266{
6267 /* Check if any output or bidirectional stream exists with the same size and
6268 format, and return that stream */
6269 if ((mInputStreamInfo.dim.width > 0) &&
6270 (mInputStreamInfo.dim.height > 0)) {
6271 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6272 it != mStreamInfo.end(); it++) {
6273
6274 camera3_stream_t *stream = (*it)->stream;
6275 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6276 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6277 (stream->format == mInputStreamInfo.format)) {
6278 // Usage flag for an input stream and the source output stream
6279 // may be different.
6280 LOGD("Found reprocessible output stream! %p", *it);
6281 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6282 stream->usage, mInputStreamInfo.usage);
6283
6284 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6285 if (channel != NULL && channel->mStreams[0]) {
6286 id = channel->mStreams[0]->getMyServerID();
6287 return NO_ERROR;
6288 }
6289 }
6290 }
6291 } else {
6292 LOGD("No input stream, so no reprocessible output stream");
6293 }
6294 return NAME_NOT_FOUND;
6295}
6296
6297/*===========================================================================
6298 * FUNCTION : lookupFwkName
6299 *
6300 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6301 * make sure the parameter is correctly propagated
6302 *
6303 * PARAMETERS :
6304 * @arr : map between the two enums
6305 * @len : len of the map
6306 * @hal_name : name of the hal_parm to map
6307 *
6308 * RETURN : int type of status
6309 * fwk_name -- success
6310 * non-zero failure code
6311 *==========================================================================*/
6312template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6313 size_t len, halType hal_name)
6314{
6315
6316 for (size_t i = 0; i < len; i++) {
6317 if (arr[i].hal_name == hal_name) {
6318 return arr[i].fwk_name;
6319 }
6320 }
6321
6322 /* Not being able to find a matching framework type is not necessarily
6323 * an error case. This happens when mm-camera supports more attributes
6324 * than the framework does */
6325 LOGH("Cannot find matching framework type");
6326 return NAME_NOT_FOUND;
6327}
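// Illustrative usage (sketch only; the map name below is hypothetical -- the real
// HAL<->framework maps are defined elsewhere in this file):
//   int fwk_mode = lookupFwkName(SOME_MODES_MAP,
//           sizeof(SOME_MODES_MAP) / sizeof(SOME_MODES_MAP[0]), hal_mode);
//   if (fwk_mode == NAME_NOT_FOUND) {
//       // The backend value has no framework equivalent; skip reporting it.
//   }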
6328
6329/*===========================================================================
6330 * FUNCTION : lookupHalName
6331 *
6332 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6333 * make sure the parameter is correctly propagated
6334 *
6335 * PARAMETERS :
6336 * @arr : map between the two enums
6337 * @len : len of the map
6338 * @fwk_name : name of the fwk parameter to map
6339 *
6340 * RETURN : int32_t type of status
6341 * hal_name -- success
6342 * non-zero failure code
6343 *==========================================================================*/
6344template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6345 size_t len, fwkType fwk_name)
6346{
6347 for (size_t i = 0; i < len; i++) {
6348 if (arr[i].fwk_name == fwk_name) {
6349 return arr[i].hal_name;
6350 }
6351 }
6352
6353 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6354 return NAME_NOT_FOUND;
6355}
6356
6357/*===========================================================================
6358 * FUNCTION : lookupProp
6359 *
6360 * DESCRIPTION: lookup a value by its name
6361 *
6362 * PARAMETERS :
6363 * @arr : map between the two enums
6364 * @len : size of the map
6365 * @name : name to be looked up
6366 *
6367 * RETURN : Value if found
6368 * CAM_CDS_MODE_MAX if not found
6369 *==========================================================================*/
6370template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6371 size_t len, const char *name)
6372{
6373 if (name) {
6374 for (size_t i = 0; i < len; i++) {
6375 if (!strcmp(arr[i].desc, name)) {
6376 return arr[i].val;
6377 }
6378 }
6379 }
6380 return CAM_CDS_MODE_MAX;
6381}
6382
6383/*===========================================================================
6384 * FUNCTION   : translateFromHalMetadata
6384 *
6385 * DESCRIPTION: Translate metadata received from the backend into framework format
6386 *
6387 * PARAMETERS :
6388 * @metadata : metadata information from callback
6389 * @timestamp: metadata buffer timestamp
6390 * @request_id: request id
6391 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006392 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006393 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6394 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006396 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6397 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006398 *
6399 * RETURN : camera_metadata_t*
6400 * metadata in a format specified by fwk
6401 *==========================================================================*/
6402camera_metadata_t*
6403QCamera3HardwareInterface::translateFromHalMetadata(
6404 metadata_buffer_t *metadata,
6405 nsecs_t timestamp,
6406 int32_t request_id,
6407 const CameraMetadata& jpegMetadata,
6408 uint8_t pipeline_depth,
6409 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006410 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006411 /* DevCamDebug metadata translateFromHalMetadata argument */
6412 uint8_t DevCamDebug_meta_enable,
6413 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006415 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006416 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006417{
6418 CameraMetadata camMetadata;
6419 camera_metadata_t *resultMetadata;
6420
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006421 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006422 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6423 * Timestamp is needed because it's used for shutter notify calculation.
6424 * */
6425 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6426 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006427 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006428 }
6429
Thierry Strudel3d639192016-09-09 11:52:26 -07006430 if (jpegMetadata.entryCount())
6431 camMetadata.append(jpegMetadata);
6432
6433 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6434 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6435 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6436 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006437 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006438 if (mBatchSize == 0) {
6439 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6440 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6441 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006442
Samuel Ha68ba5172016-12-15 18:41:12 -08006443 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6444 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6445 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6446 // DevCamDebug metadata translateFromHalMetadata AF
6447 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6448 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6449 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6450 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6451 }
6452 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6453 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6454 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6455 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6456 }
6457 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6458 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6459 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6460 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6461 }
6462 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6463 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6464 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6465 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6466 }
6467 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6468 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6469 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6470 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6471 }
6472 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6473 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6474 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6475 *DevCamDebug_af_monitor_pdaf_target_pos;
6476 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6477 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6478 }
6479 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6480 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6481 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6482 *DevCamDebug_af_monitor_pdaf_confidence;
6483 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6484 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6485 }
6486 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6487 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6488 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6489 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6490 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6491 }
6492 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6493 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6494 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6495 *DevCamDebug_af_monitor_tof_target_pos;
6496 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6497 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6498 }
6499 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6500 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6501 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6502 *DevCamDebug_af_monitor_tof_confidence;
6503 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6504 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6505 }
6506 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6507 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6508 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6509 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6510 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6513 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6514 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6515 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6516 &fwk_DevCamDebug_af_monitor_type_select, 1);
6517 }
6518 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6519 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6520 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6521 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6522 &fwk_DevCamDebug_af_monitor_refocus, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6525 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6526 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6527 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6528 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6529 }
6530 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6531 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6532 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6533 *DevCamDebug_af_search_pdaf_target_pos;
6534 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6535 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6538 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6540 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6541 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6544 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6545 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6546 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6547 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6548 }
6549 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6550 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6551 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6552 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6553 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6554 }
6555 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6556 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6557 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6558 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6559 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6562 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6563 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6564 *DevCamDebug_af_search_tof_target_pos;
6565 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6566 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6567 }
6568 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6569 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6570 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6572 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6575 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6576 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6577 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6578 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6579 }
6580 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6581 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6582 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6583 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6584 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6585 }
6586 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6587 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6588 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6589 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6590 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6593 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6594 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6595 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6596 &fwk_DevCamDebug_af_search_type_select, 1);
6597 }
6598 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6599 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6600 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6601 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6602 &fwk_DevCamDebug_af_search_next_pos, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6605 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6606 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6607 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6608 &fwk_DevCamDebug_af_search_target_pos, 1);
6609 }
6610 // DevCamDebug metadata translateFromHalMetadata AEC
6611 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6612 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6613 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6614 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6617 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6618 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6619 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6622 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6623 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6624 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6627 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6628 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6629 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6632 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6633 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6634 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6635 }
6636 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6637 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6638 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6639 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6642 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6643 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6644 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6645 }
6646 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6647 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6648 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6649 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6650 }
Samuel Ha34229982017-02-17 13:51:11 -08006651 // DevCamDebug metadata translateFromHalMetadata zzHDR
6652 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6653 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6654 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6655 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6658 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6659 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6660 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6661 }
6662 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6663 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6664 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6665 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6668 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6669 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6670 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6671 }
6672 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6673 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6674 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6675 *DevCamDebug_aec_hdr_sensitivity_ratio;
6676 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6677 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6678 }
6679 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6680 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6681 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6682 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6683 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6684 }
6685 // DevCamDebug metadata translateFromHalMetadata ADRC
6686 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6687 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6688 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6689 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6690 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6691 }
6692 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6693 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6694 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6695 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6696 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6697 }
6698 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6699 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6700 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6701 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6702 }
6703 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6704 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6705 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6706 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6707 }
6708 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6709 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6710 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6711 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6712 }
6713 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6714 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6715 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6716 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6717 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006718 // DevCamDebug metadata translateFromHalMetadata AWB
6719 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6720 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6721 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6722 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6723 }
6724 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6725 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6726 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6727 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6728 }
6729 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6730 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6731 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6732 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6733 }
6734 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6735 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6736 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6737 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6740 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6741 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6742 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6743 }
6744 }
6745 // atrace_end(ATRACE_TAG_ALWAYS);
6746
Thierry Strudel3d639192016-09-09 11:52:26 -07006747 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6748 int64_t fwk_frame_number = *frame_number;
6749 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6750 }
6751
6752 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6753 int32_t fps_range[2];
6754 fps_range[0] = (int32_t)float_range->min_fps;
6755 fps_range[1] = (int32_t)float_range->max_fps;
6756 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6757 fps_range, 2);
6758 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6759 fps_range[0], fps_range[1]);
6760 }
6761
6762 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6763 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6764 }
6765
6766 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6767        int val = lookupFwkName(SCENE_MODES_MAP,
6768 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6769 *sceneMode);
6770 if (NAME_NOT_FOUND != val) {
6771 uint8_t fwkSceneMode = (uint8_t)val;
6772 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6773 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6774 fwkSceneMode);
6775 }
6776 }
6777
6778 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6779 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6780 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6781 }
6782
6783 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6784 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6785 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6786 }
6787
6788 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6789 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6790 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6791 }
6792
6793 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6794 CAM_INTF_META_EDGE_MODE, metadata) {
6795 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6796 }
6797
6798 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6799 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6800 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6801 }
6802
6803 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6804 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6805 }
6806
6807 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6808 if (0 <= *flashState) {
6809 uint8_t fwk_flashState = (uint8_t) *flashState;
6810 if (!gCamCapability[mCameraId]->flash_available) {
6811 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6812 }
6813 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6814 }
6815 }
6816
6817 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6818 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6819 if (NAME_NOT_FOUND != val) {
6820 uint8_t fwk_flashMode = (uint8_t)val;
6821 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6822 }
6823 }
6824
6825 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6826 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6827 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6828 }
6829
6830 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6831 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6832 }
6833
6834 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6835 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6836 }
6837
6838 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6839 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6840 }
6841
6842 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6843 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6844 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6845 }
6846
6847 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6848 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6849 LOGD("fwk_videoStab = %d", fwk_videoStab);
6850 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6851 } else {
6852        // Regardless of whether video stabilization is supported, CTS expects the EIS
6853        // result to be non-NULL, so hard-code the video stabilization result to OFF mode.
6854 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6855 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006856        LOGD("EIS result defaulted to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006857 }
6858
6859 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6860 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6861 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6862 }
6863
6864 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6865 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6866 }
6867
Thierry Strudel3d639192016-09-09 11:52:26 -07006868 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6869 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006870 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006871
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006872 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6873 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006874
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006875        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006876 blackLevelAppliedPattern->cam_black_level[0],
6877 blackLevelAppliedPattern->cam_black_level[1],
6878 blackLevelAppliedPattern->cam_black_level[2],
6879 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006880 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6881 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006882
6883#ifndef USE_HAL_3_3
6884 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006885        // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6886        // depth space.
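        // For example, an applied black level of 256 in the 12-bit pipeline maps to
        // 256 / 4 = 64 in 10-bit sensor units.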
6887 fwk_blackLevelInd[0] /= 4.0;
6888 fwk_blackLevelInd[1] /= 4.0;
6889 fwk_blackLevelInd[2] /= 4.0;
6890 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006891 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6892 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006893#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006894 }
6895
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006896#ifndef USE_HAL_3_3
6897 // Fixed whitelevel is used by ISP/Sensor
6898 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6899 &gCamCapability[mCameraId]->white_level, 1);
6900#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006901
6902 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6903 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6904 int32_t scalerCropRegion[4];
6905 scalerCropRegion[0] = hScalerCropRegion->left;
6906 scalerCropRegion[1] = hScalerCropRegion->top;
6907 scalerCropRegion[2] = hScalerCropRegion->width;
6908 scalerCropRegion[3] = hScalerCropRegion->height;
6909
6910 // Adjust crop region from sensor output coordinate system to active
6911 // array coordinate system.
6912 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6913 scalerCropRegion[2], scalerCropRegion[3]);
6914
6915 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6916 }
6917
6918 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6919 LOGD("sensorExpTime = %lld", *sensorExpTime);
6920 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6921 }
6922
6923    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6924            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6925        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6926        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6927 }
6928
6929 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6930 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6931 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6932 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6933 sensorRollingShutterSkew, 1);
6934 }
6935
6936 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6937 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6938 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6939
6940 //calculate the noise profile based on sensitivity
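        // ANDROID_SENSOR_NOISE_PROFILE models the noise of a pixel value x as
        // N(x) = sqrt(S * x + O), so one (S, O) pair is published per color channel.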
6941 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6942 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6943 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6944 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6945 noise_profile[i] = noise_profile_S;
6946 noise_profile[i+1] = noise_profile_O;
6947 }
6948 LOGD("noise model entry (S, O) is (%f, %f)",
6949 noise_profile_S, noise_profile_O);
6950 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6951 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6952 }
6953
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006954#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006955 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006956 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006957 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006958 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006959 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6960 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6961 }
6962 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006963#endif
6964
Thierry Strudel3d639192016-09-09 11:52:26 -07006965 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6966 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6967 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6968 }
6969
6970 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6971 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6972 *faceDetectMode);
6973 if (NAME_NOT_FOUND != val) {
6974 uint8_t fwk_faceDetectMode = (uint8_t)val;
6975 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6976
6977 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6978 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6979 CAM_INTF_META_FACE_DETECTION, metadata) {
6980 uint8_t numFaces = MIN(
6981 faceDetectionInfo->num_faces_detected, MAX_ROI);
6982 int32_t faceIds[MAX_ROI];
6983 uint8_t faceScores[MAX_ROI];
6984 int32_t faceRectangles[MAX_ROI * 4];
6985 int32_t faceLandmarks[MAX_ROI * 6];
6986 size_t j = 0, k = 0;
6987
6988 for (size_t i = 0; i < numFaces; i++) {
6989 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6990 // Adjust crop region from sensor output coordinate system to active
6991 // array coordinate system.
6992 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6993 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6994 rect.width, rect.height);
6995
6996 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6997 faceRectangles+j, -1);
6998
6999 j+= 4;
7000 }
7001 if (numFaces <= 0) {
7002 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7003 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7004 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7005 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7006 }
7007
7008 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7009 numFaces);
7010 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7011 faceRectangles, numFaces * 4U);
7012 if (fwk_faceDetectMode ==
7013 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7014 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7015 CAM_INTF_META_FACE_LANDMARK, metadata) {
7016
7017 for (size_t i = 0; i < numFaces; i++) {
7018 // Map the co-ordinate sensor output coordinate system to active
7019 // array coordinate system.
7020 mCropRegionMapper.toActiveArray(
7021 landmarks->face_landmarks[i].left_eye_center.x,
7022 landmarks->face_landmarks[i].left_eye_center.y);
7023 mCropRegionMapper.toActiveArray(
7024 landmarks->face_landmarks[i].right_eye_center.x,
7025 landmarks->face_landmarks[i].right_eye_center.y);
7026 mCropRegionMapper.toActiveArray(
7027 landmarks->face_landmarks[i].mouth_center.x,
7028 landmarks->face_landmarks[i].mouth_center.y);
7029
7030 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007031 k+= TOTAL_LANDMARK_INDICES;
7032 }
7033 } else {
7034 for (size_t i = 0; i < numFaces; i++) {
7035 setInvalidLandmarks(faceLandmarks+k);
7036 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007037 }
7038 }
7039
7040 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7041 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7042 faceLandmarks, numFaces * 6U);
7043 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007044 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7045 CAM_INTF_META_FACE_BLINK, metadata) {
7046 uint8_t detected[MAX_ROI];
7047 uint8_t degree[MAX_ROI * 2];
7048 for (size_t i = 0; i < numFaces; i++) {
7049 detected[i] = blinks->blink[i].blink_detected;
7050 degree[2 * i] = blinks->blink[i].left_blink;
7051 degree[2 * i + 1] = blinks->blink[i].right_blink;
7052 }
7053 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7054 detected, numFaces);
7055 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7056 degree, numFaces * 2);
7057 }
7058 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7059 CAM_INTF_META_FACE_SMILE, metadata) {
7060 uint8_t degree[MAX_ROI];
7061 uint8_t confidence[MAX_ROI];
7062 for (size_t i = 0; i < numFaces; i++) {
7063 degree[i] = smiles->smile[i].smile_degree;
7064 confidence[i] = smiles->smile[i].smile_confidence;
7065 }
7066 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7067 degree, numFaces);
7068 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7069 confidence, numFaces);
7070 }
7071 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7072 CAM_INTF_META_FACE_GAZE, metadata) {
7073 int8_t angle[MAX_ROI];
7074 int32_t direction[MAX_ROI * 3];
7075 int8_t degree[MAX_ROI * 2];
7076 for (size_t i = 0; i < numFaces; i++) {
7077 angle[i] = gazes->gaze[i].gaze_angle;
7078 direction[3 * i] = gazes->gaze[i].updown_dir;
7079 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7080 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7081 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7082 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7083 }
7084 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7085 (uint8_t *)angle, numFaces);
7086 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7087 direction, numFaces * 3);
7088 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7089 (uint8_t *)degree, numFaces * 2);
7090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007091 }
7092 }
7093 }
7094 }
7095
7096 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7097 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007098 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007099 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007100 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007101
Shuzhen Wang14415f52016-11-16 18:26:18 -08007102 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7103 histogramBins = *histBins;
7104 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7105 }
7106
7107 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007108 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7109 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007110 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007111
7112 switch (stats_data->type) {
7113 case CAM_HISTOGRAM_TYPE_BAYER:
7114 switch (stats_data->bayer_stats.data_type) {
7115 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007116 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7117 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007118 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007119 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7120 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007121 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007122 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7123 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007125 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007126 case CAM_STATS_CHANNEL_R:
7127 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007128 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7129 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130 }
7131 break;
7132 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007133 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 break;
7135 }
7136
Shuzhen Wang14415f52016-11-16 18:26:18 -08007137 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007138 }
7139 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007140 }
7141
7142 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7143 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7144 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7145 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7146 }
7147
7148 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7149 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7150 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7151 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7152 }
7153
7154 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7155 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7156 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7157 CAM_MAX_SHADING_MAP_HEIGHT);
7158 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7159 CAM_MAX_SHADING_MAP_WIDTH);
7160 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7161 lensShadingMap->lens_shading, 4U * map_width * map_height);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7165 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7166 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7167 }
7168
7169 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7170 //Populate CAM_INTF_META_TONEMAP_CURVES
7171 /* ch0 = G, ch 1 = B, ch 2 = R*/
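        /* Each curve is stored as (Pin, Pout) pairs, hence the element count of
         * tonemap_points_cnt * 2 in the updates below. */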
7172 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7173 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7174 tonemap->tonemap_points_cnt,
7175 CAM_MAX_TONEMAP_CURVE_SIZE);
7176 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7177 }
7178
7179 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7180 &tonemap->curves[0].tonemap_points[0][0],
7181 tonemap->tonemap_points_cnt * 2);
7182
7183 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7184 &tonemap->curves[1].tonemap_points[0][0],
7185 tonemap->tonemap_points_cnt * 2);
7186
7187 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7188 &tonemap->curves[2].tonemap_points[0][0],
7189 tonemap->tonemap_points_cnt * 2);
7190 }
7191
7192 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7193 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7194 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7195 CC_GAIN_MAX);
7196 }
7197
7198 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7199 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7200 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7201 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7202 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7203 }
7204
7205 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7206 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7207 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7208 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7209 toneCurve->tonemap_points_cnt,
7210 CAM_MAX_TONEMAP_CURVE_SIZE);
7211 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7212 }
7213 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7214 (float*)toneCurve->curve.tonemap_points,
7215 toneCurve->tonemap_points_cnt * 2);
7216 }
7217
7218 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7219 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7220 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7221 predColorCorrectionGains->gains, 4);
7222 }
7223
7224 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7225 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7226 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7227 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7228 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7229 }
7230
7231 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7232 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7233 }
7234
7235 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7236 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7237 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7238 }
7239
7240 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7241 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7242 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7246 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7247 *effectMode);
7248 if (NAME_NOT_FOUND != val) {
7249 uint8_t fwk_effectMode = (uint8_t)val;
7250 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7251 }
7252 }
7253
7254 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7255 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7256 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7257 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7258 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7259 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7260 }
7261 int32_t fwk_testPatternData[4];
7262 fwk_testPatternData[0] = testPatternData->r;
7263 fwk_testPatternData[3] = testPatternData->b;
7264 switch (gCamCapability[mCameraId]->color_arrangement) {
7265 case CAM_FILTER_ARRANGEMENT_RGGB:
7266 case CAM_FILTER_ARRANGEMENT_GRBG:
7267 fwk_testPatternData[1] = testPatternData->gr;
7268 fwk_testPatternData[2] = testPatternData->gb;
7269 break;
7270 case CAM_FILTER_ARRANGEMENT_GBRG:
7271 case CAM_FILTER_ARRANGEMENT_BGGR:
7272 fwk_testPatternData[2] = testPatternData->gr;
7273 fwk_testPatternData[1] = testPatternData->gb;
7274 break;
7275 default:
7276 LOGE("color arrangement %d is not supported",
7277 gCamCapability[mCameraId]->color_arrangement);
7278 break;
7279 }
7280 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7281 }
7282
7283 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7284 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7285 }
7286
7287 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7288 String8 str((const char *)gps_methods);
7289 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7290 }
7291
7292 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7293 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7294 }
7295
7296 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7297 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7298 }
7299
7300 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7301 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7302 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7303 }
7304
7305 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7306 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7307 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7308 }
7309
7310 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7311 int32_t fwk_thumb_size[2];
7312 fwk_thumb_size[0] = thumb_size->width;
7313 fwk_thumb_size[1] = thumb_size->height;
7314 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7315 }
7316
7317 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7318 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7319 privateData,
7320 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7321 }
7322
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007323 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007324 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007325 meteringMode, 1);
7326 }
7327
Thierry Strudel54dc9782017-02-15 12:12:10 -08007328 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7329 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7330 LOGD("hdr_scene_data: %d %f\n",
7331 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7332 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7333 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7334 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7335 &isHdr, 1);
7336 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7337 &isHdrConfidence, 1);
7338 }
7339
7340
7341
Thierry Strudel3d639192016-09-09 11:52:26 -07007342 if (metadata->is_tuning_params_valid) {
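        // Blob layout: six uint32_t header fields (the data version followed by the
        // sensor/VFE/CPP/CAC/mod3 payload sizes), then the sensor, VFE, CPP and CAC
        // payloads, each clamped to its TUNING_*_DATA_MAX limit.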
7343 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7344 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7345 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7346
7347
7348 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7349 sizeof(uint32_t));
7350 data += sizeof(uint32_t);
7351
7352 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7353 sizeof(uint32_t));
7354 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7355 data += sizeof(uint32_t);
7356
7357 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7358 sizeof(uint32_t));
7359 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7360 data += sizeof(uint32_t);
7361
7362 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7363 sizeof(uint32_t));
7364 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7365 data += sizeof(uint32_t);
7366
7367 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7368 sizeof(uint32_t));
7369 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7370 data += sizeof(uint32_t);
7371
7372 metadata->tuning_params.tuning_mod3_data_size = 0;
7373 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7374 sizeof(uint32_t));
7375 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7376 data += sizeof(uint32_t);
7377
7378 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7379 TUNING_SENSOR_DATA_MAX);
7380 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7381 count);
7382 data += count;
7383
7384 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7385 TUNING_VFE_DATA_MAX);
7386 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7387 count);
7388 data += count;
7389
7390 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7391 TUNING_CPP_DATA_MAX);
7392 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7393 count);
7394 data += count;
7395
7396 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7397 TUNING_CAC_DATA_MAX);
7398 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7399 count);
7400 data += count;
7401
7402 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7403 (int32_t *)(void *)tuning_meta_data_blob,
7404 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7405 }
7406
7407 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7408 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7409 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7410 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7411 NEUTRAL_COL_POINTS);
7412 }
7413
7414 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7415 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7416 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7417 }
7418
7419 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7420 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7421 // Adjust crop region from sensor output coordinate system to active
7422 // array coordinate system.
7423 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7424 hAeRegions->rect.width, hAeRegions->rect.height);
7425
7426 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7427 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7428 REGIONS_TUPLE_COUNT);
7429 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7430 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7431 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7432 hAeRegions->rect.height);
7433 }
7434
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007435 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7436 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7437 if (NAME_NOT_FOUND != val) {
7438 uint8_t fwkAfMode = (uint8_t)val;
7439 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7440 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7441 } else {
7442 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7443 val);
7444 }
7445 }
7446
Thierry Strudel3d639192016-09-09 11:52:26 -07007447 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7448 uint8_t fwk_afState = (uint8_t) *afState;
7449 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007450 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007451 }
7452
7453 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7454 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7455 }
7456
7457 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7458 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7459 }
7460
7461 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7462 uint8_t fwk_lensState = *lensState;
7463 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7464 }
7465
Thierry Strudel3d639192016-09-09 11:52:26 -07007466
7467 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007468 uint32_t ab_mode = *hal_ab_mode;
7469 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7470 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7471 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7472 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007473 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007474 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007475 if (NAME_NOT_FOUND != val) {
7476 uint8_t fwk_ab_mode = (uint8_t)val;
7477 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7478 }
7479 }
7480
7481 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7482 int val = lookupFwkName(SCENE_MODES_MAP,
7483 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7484 if (NAME_NOT_FOUND != val) {
7485 uint8_t fwkBestshotMode = (uint8_t)val;
7486 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7487 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7488 } else {
7489 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7490 }
7491 }
7492
7493 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7494 uint8_t fwk_mode = (uint8_t) *mode;
7495 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7496 }
7497
7498    /* Constant metadata values to be updated */
7499 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7500 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7501
7502 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7503 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7504
7505 int32_t hotPixelMap[2];
7506 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
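    // The hot pixel map mode is reported as OFF above, so an empty (zero-length)
    // coordinate list is published for ANDROID_STATISTICS_HOT_PIXEL_MAP.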
7507
7508 // CDS
7509 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7510 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7511 }
7512
Thierry Strudel04e026f2016-10-10 11:27:36 -07007513 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7514 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007515 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007516 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7517 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7518 } else {
7519 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7520 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007521
7522 if(fwk_hdr != curr_hdr_state) {
7523 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7524 if(fwk_hdr)
7525 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7526 else
7527 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7528 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007529 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7530 }
7531
Thierry Strudel54dc9782017-02-15 12:12:10 -08007532 //binning correction
7533 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7534 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7535 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7536 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7537 }
7538
Thierry Strudel04e026f2016-10-10 11:27:36 -07007539 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007540 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007541 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7542 int8_t is_ir_on = 0;
7543
7544        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7545 if(is_ir_on != curr_ir_state) {
7546 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7547 if(is_ir_on)
7548 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7549 else
7550 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7551 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007552 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007553 }
7554
Thierry Strudel269c81a2016-10-12 12:13:59 -07007555 // AEC SPEED
7556 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7557 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7558 }
7559
7560 // AWB SPEED
7561 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7562 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7563 }
7564
Thierry Strudel3d639192016-09-09 11:52:26 -07007565 // TNR
7566 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7567 uint8_t tnr_enable = tnr->denoise_enable;
7568 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007569 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7570 int8_t is_tnr_on = 0;
7571
7572        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7573 if(is_tnr_on != curr_tnr_state) {
7574 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7575 if(is_tnr_on)
7576 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7577 else
7578 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007580
7581 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7582 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7583 }
7584
7585 // Reprocess crop data
7586 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7587 uint8_t cnt = crop_data->num_of_streams;
7588 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7589 // mm-qcamera-daemon only posts crop_data for streams
7590            // not linked to pproc, so the absence of valid crop metadata is not
7591            // necessarily an error case.
7592 LOGD("No valid crop metadata entries");
7593 } else {
7594 uint32_t reproc_stream_id;
7595 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7596 LOGD("No reprocessible stream found, ignore crop data");
7597 } else {
7598 int rc = NO_ERROR;
7599 Vector<int32_t> roi_map;
7600 int32_t *crop = new int32_t[cnt*4];
7601 if (NULL == crop) {
7602 rc = NO_MEMORY;
7603 }
7604 if (NO_ERROR == rc) {
7605 int32_t streams_found = 0;
7606 for (size_t i = 0; i < cnt; i++) {
7607 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7608 if (pprocDone) {
7609 // HAL already does internal reprocessing,
7610 // either via reprocessing before JPEG encoding,
7611 // or offline postprocessing for pproc bypass case.
7612 crop[0] = 0;
7613 crop[1] = 0;
7614 crop[2] = mInputStreamInfo.dim.width;
7615 crop[3] = mInputStreamInfo.dim.height;
7616 } else {
7617 crop[0] = crop_data->crop_info[i].crop.left;
7618 crop[1] = crop_data->crop_info[i].crop.top;
7619 crop[2] = crop_data->crop_info[i].crop.width;
7620 crop[3] = crop_data->crop_info[i].crop.height;
7621 }
7622 roi_map.add(crop_data->crop_info[i].roi_map.left);
7623 roi_map.add(crop_data->crop_info[i].roi_map.top);
7624 roi_map.add(crop_data->crop_info[i].roi_map.width);
7625 roi_map.add(crop_data->crop_info[i].roi_map.height);
7626 streams_found++;
7627 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7628 crop[0], crop[1], crop[2], crop[3]);
7629 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7630 crop_data->crop_info[i].roi_map.left,
7631 crop_data->crop_info[i].roi_map.top,
7632 crop_data->crop_info[i].roi_map.width,
7633 crop_data->crop_info[i].roi_map.height);
7634 break;
7635
7636 }
7637 }
7638 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7639 &streams_found, 1);
7640 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7641 crop, (size_t)(streams_found * 4));
7642 if (roi_map.array()) {
7643 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7644 roi_map.array(), roi_map.size());
7645 }
7646 }
7647 if (crop) {
7648 delete [] crop;
7649 }
7650 }
7651 }
7652 }
7653
7654 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7655        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7656        // non-NULL, so hard-code the CAC result to OFF mode.
7657 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7658 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7659 } else {
7660 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7661 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7662 *cacMode);
7663 if (NAME_NOT_FOUND != val) {
7664 uint8_t resultCacMode = (uint8_t)val;
7665                // Check whether the CAC result from the callback matches the CAC mode
7666                // set by the framework; if not, report the mode from the corresponding request.
7667 if (fwk_cacMode != resultCacMode) {
7668 resultCacMode = fwk_cacMode;
7669 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007670 //Check if CAC is disabled by property
7671 if (m_cacModeDisabled) {
7672 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7673 }
7674
Thierry Strudel3d639192016-09-09 11:52:26 -07007675 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7676 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7677 } else {
7678 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7679 }
7680 }
7681 }
7682
7683 // Post blob of cam_cds_data through vendor tag.
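    // Only the CDS setting of the reprocessible output stream is relevant here,
    // so a single-entry override blob is published.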
7684 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7685 uint8_t cnt = cdsInfo->num_of_streams;
7686 cam_cds_data_t cdsDataOverride;
7687 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7688 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7689 cdsDataOverride.num_of_streams = 1;
7690 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7691 uint32_t reproc_stream_id;
7692 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7693 LOGD("No reprocessible stream found, ignore cds data");
7694 } else {
7695 for (size_t i = 0; i < cnt; i++) {
7696 if (cdsInfo->cds_info[i].stream_id ==
7697 reproc_stream_id) {
7698 cdsDataOverride.cds_info[0].cds_enable =
7699 cdsInfo->cds_info[i].cds_enable;
7700 break;
7701 }
7702 }
7703 }
7704 } else {
7705 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7706 }
7707 camMetadata.update(QCAMERA3_CDS_INFO,
7708 (uint8_t *)&cdsDataOverride,
7709 sizeof(cam_cds_data_t));
7710 }
7711
7712 // Ldaf calibration data
7713 if (!mLdafCalibExist) {
7714 IF_META_AVAILABLE(uint32_t, ldafCalib,
7715 CAM_INTF_META_LDAF_EXIF, metadata) {
7716 mLdafCalibExist = true;
7717 mLdafCalib[0] = ldafCalib[0];
7718 mLdafCalib[1] = ldafCalib[1];
7719 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7720 ldafCalib[0], ldafCalib[1]);
7721 }
7722 }
7723
Thierry Strudel54dc9782017-02-15 12:12:10 -08007724 // EXIF debug data through vendor tag
7725 /*
7726 * Mobicat Mask can assume 3 values:
7727 * 1 refers to Mobicat data,
7728 * 2 refers to Stats Debug and Exif Debug Data
7729 * 3 refers to Mobicat and Stats Debug Data
7730 * We want to make sure that we are sending Exif debug data
7731 * only when Mobicat Mask is 2.
7732 */
7733 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7734 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7735 (uint8_t *)(void *)mExifParams.debug_params,
7736 sizeof(mm_jpeg_debug_exif_params_t));
7737 }
7738
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007739 // Reprocess and DDM debug data through vendor tag
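    // The sensor/CAMIF/ISP/CPP snapshot crops, AF focal length ratio, pipeline flip,
    // rotation, AF ROI and dynamic feature mask are packed into one cam_reprocess_info_t
    // blob and posted through QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB.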
7740 cam_reprocess_info_t repro_info;
7741 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007742 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7743 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007744 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007745 }
7746 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7747 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007748 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007749 }
7750 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7751 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007752 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 }
7754 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7755 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007756 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007757 }
7758 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7759 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007760 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007761 }
7762 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007763 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007764 }
7765 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7766 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007768 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007769 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7770 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7771 }
7772 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7773 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7774 }
7775 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7776 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007777
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007778 // INSTANT AEC MODE
7779 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7780 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7781 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7782 }
7783
Shuzhen Wange763e802016-03-31 10:24:29 -07007784 // AF scene change
7785 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7786 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7787 }
7788
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 resultMetadata = camMetadata.release();
7790 return resultMetadata;
7791}
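
/*===========================================================================
 * NOTE       : Illustrative sketch only -- this helper is not part of the HAL
 *              and is not called anywhere; the function name is hypothetical.
 *              It shows how a consumer of the camera_metadata_t returned by
 *              translateFromHalMetadata() might read back one of the
 *              translated tags, kept under #if 0 so it does not affect the build.
 *==========================================================================*/
#if 0
static void exampleReadTranslatedResult(camera_metadata_t *resultMetadata)
{
    CameraMetadata result(resultMetadata); // the wrapper takes ownership of the buffer
    camera_metadata_entry entry = result.find(ANDROID_SENSOR_EXPOSURE_TIME);
    if (entry.count == 1) {
        LOGD("Translated exposure time: %lld ns", entry.data.i64[0]);
    }
}
#endif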
7792
7793/*===========================================================================
7794 * FUNCTION : saveExifParams
7795 *
7796 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters delivered in the HAL metadata into mExifParams.debug_params
7797 *
7798 * PARAMETERS :
7799 * @metadata : metadata information from callback
7800 *
7801 * RETURN : none
7802 *
7803 *==========================================================================*/
7804void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7805{
7806 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7807 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7808 if (mExifParams.debug_params) {
7809 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7810 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7811 }
7812 }
7813 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7814 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7815 if (mExifParams.debug_params) {
7816 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7817 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7818 }
7819 }
7820 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7821 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7822 if (mExifParams.debug_params) {
7823 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7824 mExifParams.debug_params->af_debug_params_valid = TRUE;
7825 }
7826 }
7827 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7828 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7829 if (mExifParams.debug_params) {
7830 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7831 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7832 }
7833 }
7834 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7835 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7836 if (mExifParams.debug_params) {
7837 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7838 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7839 }
7840 }
7841 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7842 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7843 if (mExifParams.debug_params) {
7844 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7845 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7846 }
7847 }
7848 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7849 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7850 if (mExifParams.debug_params) {
7851 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7852 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7853 }
7854 }
7855 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7856 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7857 if (mExifParams.debug_params) {
7858 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7859 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7860 }
7861 }
7862}
7863
7864/*===========================================================================
7865 * FUNCTION : get3AExifParams
7866 *
7867 * DESCRIPTION:
7868 *
7869 * PARAMETERS : none
7870 *
7871 *
7872 * RETURN : mm_jpeg_exif_params_t
7873 *
7874 *==========================================================================*/
7875mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7876{
7877 return mExifParams;
7878}
7879
7880/*===========================================================================
7881 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7882 *
7883 * DESCRIPTION: Translate the partial (urgent) 3A metadata from the HAL format into framework result metadata
7884 *
7885 * PARAMETERS :
7886 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007887 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7888 * urgent metadata in a batch. Always true for
7889 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007890 *
7891 * RETURN : camera_metadata_t*
7892 * metadata in a format specified by fwk
7893 *==========================================================================*/
7894camera_metadata_t*
7895QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007896 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007897{
7898 CameraMetadata camMetadata;
7899 camera_metadata_t *resultMetadata;
7900
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007901 if (!lastUrgentMetadataInBatch) {
7902 /* In batch mode, use empty metadata if this is not the last in batch
7903 */
7904 resultMetadata = allocate_camera_metadata(0, 0);
7905 return resultMetadata;
7906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007907
7908 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7909 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7910 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7911 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7912 }
7913
7914 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7915 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7916 &aecTrigger->trigger, 1);
7917 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7918 &aecTrigger->trigger_id, 1);
7919 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7920 aecTrigger->trigger);
7921 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7922 aecTrigger->trigger_id);
7923 }
7924
7925 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7926 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7927 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7928 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7929 }
7930
Thierry Strudel3d639192016-09-09 11:52:26 -07007931 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7932 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7933 &af_trigger->trigger, 1);
7934 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7935 af_trigger->trigger);
7936 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7937 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7938 af_trigger->trigger_id);
7939 }
7940
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07007941 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7942 /*af regions*/
7943 int32_t afRegions[REGIONS_TUPLE_COUNT];
7944 // Adjust crop region from sensor output coordinate system to active
7945 // array coordinate system.
7946 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7947 hAfRegions->rect.width, hAfRegions->rect.height);
7948
7949 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7950 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7951 REGIONS_TUPLE_COUNT);
7952 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7953 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7954 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7955 hAfRegions->rect.height);
7956 }
7957
Thierry Strudel3d639192016-09-09 11:52:26 -07007958 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7959 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7960 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7961 if (NAME_NOT_FOUND != val) {
7962 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7963 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7964 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7965 } else {
7966 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7967 }
7968 }
7969
7970 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7971 uint32_t aeMode = CAM_AE_MODE_MAX;
7972 int32_t flashMode = CAM_FLASH_MODE_MAX;
7973 int32_t redeye = -1;
7974 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7975 aeMode = *pAeMode;
7976 }
7977 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7978 flashMode = *pFlashMode;
7979 }
7980 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7981 redeye = *pRedeye;
7982 }
7983
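    // Derive ANDROID_CONTROL_AE_MODE from the backend fields in priority order:
    // red-eye reduction first, then an explicit flash mode (auto/on), then plain
    // AE on/off.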
7984 if (1 == redeye) {
7985 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7986 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7987 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7988 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7989 flashMode);
7990 if (NAME_NOT_FOUND != val) {
7991 fwk_aeMode = (uint8_t)val;
7992 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7993 } else {
7994 LOGE("Unsupported flash mode %d", flashMode);
7995 }
7996 } else if (aeMode == CAM_AE_MODE_ON) {
7997 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7998 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7999 } else if (aeMode == CAM_AE_MODE_OFF) {
8000 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8001 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8002 } else {
8003 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8004 "flashMode:%d, aeMode:%u!!!",
8005 redeye, flashMode, aeMode);
8006 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008007 if (mInstantAEC) {
8008        // Increment the frame index count until the bound is reached for instant AEC.
8009 mInstantAecFrameIdxCount++;
8010 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8011 CAM_INTF_META_AEC_INFO, metadata) {
8012 LOGH("ae_params->settled = %d",ae_params->settled);
8013            // If AEC has settled, or the number of frames has reached the bound,
8014            // reset instant AEC.
8015 if (ae_params->settled ||
8016 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8017 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8018 mInstantAEC = false;
8019 mResetInstantAEC = true;
8020 mInstantAecFrameIdxCount = 0;
8021 }
8022 }
8023 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008024 resultMetadata = camMetadata.release();
8025 return resultMetadata;
8026}
8027
8028/*===========================================================================
8029 * FUNCTION : dumpMetadataToFile
8030 *
8031 * DESCRIPTION: Dumps tuning metadata to file system
8032 *
8033 * PARAMETERS :
8034 * @meta : tuning metadata
8035 * @dumpFrameCount : current dump frame count
8036 *   @enabled        : Enable mask
 *   @type           : type string used in the dump file name
 *   @frameNumber    : frame number used in the dump file name
8037 *
8038 *==========================================================================*/
8039void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8040 uint32_t &dumpFrameCount,
8041 bool enabled,
8042 const char *type,
8043 uint32_t frameNumber)
8044{
8045 //Some sanity checks
8046 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8047 LOGE("Tuning sensor data size bigger than expected %d: %d",
8048 meta.tuning_sensor_data_size,
8049 TUNING_SENSOR_DATA_MAX);
8050 return;
8051 }
8052
8053 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8054 LOGE("Tuning VFE data size bigger than expected %d: %d",
8055 meta.tuning_vfe_data_size,
8056 TUNING_VFE_DATA_MAX);
8057 return;
8058 }
8059
8060 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8061 LOGE("Tuning CPP data size bigger than expected %d: %d",
8062 meta.tuning_cpp_data_size,
8063 TUNING_CPP_DATA_MAX);
8064 return;
8065 }
8066
8067 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8068 LOGE("Tuning CAC data size bigger than expected %d: %d",
8069 meta.tuning_cac_data_size,
8070 TUNING_CAC_DATA_MAX);
8071 return;
8072 }
8073 //
8074
8075 if(enabled){
8076 char timeBuf[FILENAME_MAX];
8077 char buf[FILENAME_MAX];
8078 memset(buf, 0, sizeof(buf));
8079 memset(timeBuf, 0, sizeof(timeBuf));
8080 time_t current_time;
8081 struct tm * timeinfo;
8082 time (&current_time);
8083 timeinfo = localtime (&current_time);
8084 if (timeinfo != NULL) {
8085 strftime (timeBuf, sizeof(timeBuf),
8086 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8087 }
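        // Dump file path:
        // <QCAMERA_DUMP_FRM_LOCATION><YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin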
8088 String8 filePath(timeBuf);
8089 snprintf(buf,
8090 sizeof(buf),
8091 "%dm_%s_%d.bin",
8092 dumpFrameCount,
8093 type,
8094 frameNumber);
8095 filePath.append(buf);
8096 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8097 if (file_fd >= 0) {
8098 ssize_t written_len = 0;
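            // Dump layout: data version, then the five section sizes (sensor, VFE,
            // CPP, CAC, mod3), followed by the section payloads at their fixed
            // offsets within meta.data.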
8099 meta.tuning_data_version = TUNING_DATA_VERSION;
8100 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8101 written_len += write(file_fd, data, sizeof(uint32_t));
8102 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8103 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8104 written_len += write(file_fd, data, sizeof(uint32_t));
8105 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8106 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8107 written_len += write(file_fd, data, sizeof(uint32_t));
8108 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8109 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8110 written_len += write(file_fd, data, sizeof(uint32_t));
8111 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8112 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8113 written_len += write(file_fd, data, sizeof(uint32_t));
8114 meta.tuning_mod3_data_size = 0;
8115 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8116 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8117 written_len += write(file_fd, data, sizeof(uint32_t));
8118 size_t total_size = meta.tuning_sensor_data_size;
8119 data = (void *)((uint8_t *)&meta.data);
8120 written_len += write(file_fd, data, total_size);
8121 total_size = meta.tuning_vfe_data_size;
8122 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8123 written_len += write(file_fd, data, total_size);
8124 total_size = meta.tuning_cpp_data_size;
8125 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8126 written_len += write(file_fd, data, total_size);
8127 total_size = meta.tuning_cac_data_size;
8128 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8129 written_len += write(file_fd, data, total_size);
8130 close(file_fd);
8131        } else {
8132 LOGE("fail to open file for metadata dumping");
8133 }
8134 }
8135}
8136
8137/*===========================================================================
8138 * FUNCTION : cleanAndSortStreamInfo
8139 *
8140 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8141 *              and sort them such that the raw stream is at the end of the list.
8142 *              This is a workaround for a camera daemon constraint.
8143 *
8144 * PARAMETERS : None
8145 *
8146 *==========================================================================*/
8147void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8148{
8149 List<stream_info_t *> newStreamInfo;
8150
8151 /*clean up invalid streams*/
8152 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8153 it != mStreamInfo.end();) {
8154 if(((*it)->status) == INVALID){
8155 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8156 delete channel;
8157 free(*it);
8158 it = mStreamInfo.erase(it);
8159 } else {
8160 it++;
8161 }
8162 }
8163
8164 // Move preview/video/callback/snapshot streams into newList
8165 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8166 it != mStreamInfo.end();) {
8167 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8168 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8169 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8170 newStreamInfo.push_back(*it);
8171 it = mStreamInfo.erase(it);
8172 } else
8173 it++;
8174 }
8175 // Move raw streams into newList
8176 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8177 it != mStreamInfo.end();) {
8178 newStreamInfo.push_back(*it);
8179 it = mStreamInfo.erase(it);
8180 }
8181
8182 mStreamInfo = newStreamInfo;
8183}
8184
8185/*===========================================================================
8186 * FUNCTION : extractJpegMetadata
8187 *
8188 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8189 *              JPEG metadata is cached in HAL, and returned as part of the capture
8190 * result when metadata is returned from camera daemon.
8191 *
8192 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8193 * @request: capture request
8194 *
8195 *==========================================================================*/
8196void QCamera3HardwareInterface::extractJpegMetadata(
8197 CameraMetadata& jpegMetadata,
8198 const camera3_capture_request_t *request)
8199{
8200 CameraMetadata frame_settings;
8201 frame_settings = request->settings;
8202
8203 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8204 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8205 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8206 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8207
8208 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8209 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8210 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8211 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8212
8213 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8214 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8215 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8216 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8217
8218 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8219 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8220 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8221 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8222
8223 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8224 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8225 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8226 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8227
8228 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8229 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8230 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8231 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8232
8233 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8234 int32_t thumbnail_size[2];
8235 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8236 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8237 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8238 int32_t orientation =
8239 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008240 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008241 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8242 int32_t temp;
8243 temp = thumbnail_size[0];
8244 thumbnail_size[0] = thumbnail_size[1];
8245 thumbnail_size[1] = temp;
8246 }
8247 }
8248 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8249 thumbnail_size,
8250 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8251 }
8252
8253}
8254
8255/*===========================================================================
8256 * FUNCTION : convertToRegions
8257 *
8258 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8259 *
8260 * PARAMETERS :
8261 * @rect : cam_rect_t struct to convert
8262 * @region : int32_t destination array
8263 * @weight : if we are converting from cam_area_t, weight is valid
8264 * else weight = -1
8265 *
8266 *==========================================================================*/
8267void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8268 int32_t *region, int weight)
8269{
8270 region[0] = rect.left;
8271 region[1] = rect.top;
8272 region[2] = rect.left + rect.width;
8273 region[3] = rect.top + rect.height;
8274 if (weight > -1) {
8275 region[4] = weight;
8276 }
8277}
8278
8279/*===========================================================================
8280 * FUNCTION : convertFromRegions
8281 *
8282 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8283 *
8284 * PARAMETERS :
8285 *   @roi            : cam_area_t destination to be filled
8286 *   @frame_settings : capture request settings containing the region tag
8287 *   @tag            : metadata tag holding the {xmin, ymin, xmax, ymax, weight}
8288 *                     array
8289 *
8290 *==========================================================================*/
8291void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008292 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008293{
Thierry Strudel3d639192016-09-09 11:52:26 -07008294 int32_t x_min = frame_settings.find(tag).data.i32[0];
8295 int32_t y_min = frame_settings.find(tag).data.i32[1];
8296 int32_t x_max = frame_settings.find(tag).data.i32[2];
8297 int32_t y_max = frame_settings.find(tag).data.i32[3];
8298 roi.weight = frame_settings.find(tag).data.i32[4];
8299 roi.rect.left = x_min;
8300 roi.rect.top = y_min;
8301 roi.rect.width = x_max - x_min;
8302 roi.rect.height = y_max - y_min;
8303}
8304
8305/*===========================================================================
8306 * FUNCTION : resetIfNeededROI
8307 *
8308 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8309 * crop region
8310 *
8311 * PARAMETERS :
8312 * @roi : cam_area_t struct to resize
8313 * @scalerCropRegion : cam_crop_region_t region to compare against
8314 *
8315 *
8316 *==========================================================================*/
8317bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8318 const cam_crop_region_t* scalerCropRegion)
8319{
8320 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8321 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8322 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8323 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8324
8325    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8326     * Without this check, the validation below (whether the roi is inside the
8327     * scaler crop region) would fail, the roi would not be reset, and the
8328     * algorithm would keep using a stale roi window.
8329     */
8330 if (roi->weight == 0) {
8331 return true;
8332 }
8333
8334 if ((roi_x_max < scalerCropRegion->left) ||
8335 // right edge of roi window is left of scalar crop's left edge
8336 (roi_y_max < scalerCropRegion->top) ||
8337 // bottom edge of roi window is above scalar crop's top edge
8338 (roi->rect.left > crop_x_max) ||
8339        // left edge of roi window is to the right of scalar crop's right edge
8340 (roi->rect.top > crop_y_max)){
8341        // top edge of roi window is below scalar crop's bottom edge
8342 return false;
8343 }
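    // Clamp the roi to the scaler crop region and recompute its width/height.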
8344 if (roi->rect.left < scalerCropRegion->left) {
8345 roi->rect.left = scalerCropRegion->left;
8346 }
8347 if (roi->rect.top < scalerCropRegion->top) {
8348 roi->rect.top = scalerCropRegion->top;
8349 }
8350 if (roi_x_max > crop_x_max) {
8351 roi_x_max = crop_x_max;
8352 }
8353 if (roi_y_max > crop_y_max) {
8354 roi_y_max = crop_y_max;
8355 }
8356 roi->rect.width = roi_x_max - roi->rect.left;
8357 roi->rect.height = roi_y_max - roi->rect.top;
8358 return true;
8359}
8360
8361/*===========================================================================
8362 * FUNCTION : convertLandmarks
8363 *
8364 * DESCRIPTION: helper method to extract the landmarks from face detection info
8365 *
8366 * PARAMETERS :
8367 * @landmark_data : input landmark data to be converted
8368 * @landmarks : int32_t destination array
8369 *
8370 *
8371 *==========================================================================*/
8372void QCamera3HardwareInterface::convertLandmarks(
8373 cam_face_landmarks_info_t landmark_data,
8374 int32_t *landmarks)
8375{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008376 if (landmark_data.is_left_eye_valid) {
8377 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8378 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8379 } else {
8380 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8381 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8382 }
8383
8384 if (landmark_data.is_right_eye_valid) {
8385 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8386 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8387 } else {
8388 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8389 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8390 }
8391
8392 if (landmark_data.is_mouth_valid) {
8393 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8394 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8395 } else {
8396 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8397 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8398 }
8399}
8400
8401/*===========================================================================
8402 * FUNCTION : setInvalidLandmarks
8403 *
8404 * DESCRIPTION: helper method to set invalid landmarks
8405 *
8406 * PARAMETERS :
8407 * @landmarks : int32_t destination array
8408 *
8409 *
8410 *==========================================================================*/
8411void QCamera3HardwareInterface::setInvalidLandmarks(
8412 int32_t *landmarks)
8413{
8414 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8415 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8416 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8417 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8418 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8419 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008420}
8421
8422#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008423
8424/*===========================================================================
8425 * FUNCTION : getCapabilities
8426 *
8427 * DESCRIPTION: query camera capability from back-end
8428 *
8429 * PARAMETERS :
8430 * @ops : mm-interface ops structure
8431 * @cam_handle : camera handle for which we need capability
8432 *
8433 * RETURN : ptr type of capability structure
8434 * capability for success
8435 * NULL for failure
8436 *==========================================================================*/
8437cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8438 uint32_t cam_handle)
8439{
8440 int rc = NO_ERROR;
8441 QCamera3HeapMemory *capabilityHeap = NULL;
8442 cam_capability_t *cap_ptr = NULL;
8443
8444 if (ops == NULL) {
8445 LOGE("Invalid arguments");
8446 return NULL;
8447 }
8448
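    // The capability is queried into a heap buffer mapped to the backend, then
    // copied into a malloc'd cam_capability_t that is returned to the caller,
    // which takes ownership of it.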
8449 capabilityHeap = new QCamera3HeapMemory(1);
8450 if (capabilityHeap == NULL) {
8451 LOGE("creation of capabilityHeap failed");
8452 return NULL;
8453 }
8454
8455 /* Allocate memory for capability buffer */
8456 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8457 if(rc != OK) {
8458        LOGE("No memory for capability");
8459 goto allocate_failed;
8460 }
8461
8462 /* Map memory for capability buffer */
8463 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8464
8465 rc = ops->map_buf(cam_handle,
8466 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8467 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8468 if(rc < 0) {
8469 LOGE("failed to map capability buffer");
8470 rc = FAILED_TRANSACTION;
8471 goto map_failed;
8472 }
8473
8474 /* Query Capability */
8475 rc = ops->query_capability(cam_handle);
8476 if(rc < 0) {
8477 LOGE("failed to query capability");
8478 rc = FAILED_TRANSACTION;
8479 goto query_failed;
8480 }
8481
8482 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8483 if (cap_ptr == NULL) {
8484 LOGE("out of memory");
8485 rc = NO_MEMORY;
8486 goto query_failed;
8487 }
8488
8489 memset(cap_ptr, 0, sizeof(cam_capability_t));
8490 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8491
8492 int index;
8493 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8494 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8495 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8496 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8497 }
8498
8499query_failed:
8500 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8501map_failed:
8502 capabilityHeap->deallocate();
8503allocate_failed:
8504 delete capabilityHeap;
8505
8506 if (rc != NO_ERROR) {
8507 return NULL;
8508 } else {
8509 return cap_ptr;
8510 }
8511}
8512
Thierry Strudel3d639192016-09-09 11:52:26 -07008513/*===========================================================================
8514 * FUNCTION : initCapabilities
8515 *
8516 * DESCRIPTION: initialize camera capabilities in static data struct
8517 *
8518 * PARAMETERS :
8519 * @cameraId : camera Id
8520 *
8521 * RETURN : int32_t type of status
8522 * NO_ERROR -- success
8523 *              non-zero failure code
8524 *==========================================================================*/
8525int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8526{
8527 int rc = 0;
8528 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008529 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008530
8531 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8532 if (rc) {
8533 LOGE("camera_open failed. rc = %d", rc);
8534 goto open_failed;
8535 }
8536 if (!cameraHandle) {
8537 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8538 goto open_failed;
8539 }
8540
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008541 handle = get_main_camera_handle(cameraHandle->camera_handle);
8542 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8543 if (gCamCapability[cameraId] == NULL) {
8544 rc = FAILED_TRANSACTION;
8545 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008546 }
8547
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008548 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008549 if (is_dual_camera_by_idx(cameraId)) {
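        // For dual-camera sensors, also query the aux camera capability and keep
        // a separate copy of the main camera capability in main_cam_cap.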
8550 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8551 gCamCapability[cameraId]->aux_cam_cap =
8552 getCapabilities(cameraHandle->ops, handle);
8553 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8554 rc = FAILED_TRANSACTION;
8555 free(gCamCapability[cameraId]);
8556 goto failed_op;
8557 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008558
8559 // Copy the main camera capability to main_cam_cap struct
8560 gCamCapability[cameraId]->main_cam_cap =
8561 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8562 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8563 LOGE("out of memory");
8564 rc = NO_MEMORY;
8565 goto failed_op;
8566 }
8567 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8568 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008569 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008570failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008571 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8572 cameraHandle = NULL;
8573open_failed:
8574 return rc;
8575}
8576
8577/*==========================================================================
8578 * FUNCTION   : get3AVersion
8579 *
8580 * DESCRIPTION: get the Q3A S/W version
8581 *
8582 * PARAMETERS :
8583 * @sw_version: Reference of Q3A structure which will hold version info upon
8584 * return
8585 *
8586 * RETURN : None
8587 *
8588 *==========================================================================*/
8589void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8590{
8591 if(gCamCapability[mCameraId])
8592 sw_version = gCamCapability[mCameraId]->q3a_version;
8593 else
8594 LOGE("Capability structure NULL!");
8595}
8596
8597
8598/*===========================================================================
8599 * FUNCTION : initParameters
8600 *
8601 * DESCRIPTION: initialize camera parameters
8602 *
8603 * PARAMETERS :
8604 *
8605 * RETURN : int32_t type of status
8606 * NO_ERROR -- success
8607 *              non-zero failure code
8608 *==========================================================================*/
8609int QCamera3HardwareInterface::initParameters()
8610{
8611 int rc = 0;
8612
8613 //Allocate Set Param Buffer
8614 mParamHeap = new QCamera3HeapMemory(1);
8615 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8616 if(rc != OK) {
8617 rc = NO_MEMORY;
8618 LOGE("Failed to allocate SETPARM Heap memory");
8619 delete mParamHeap;
8620 mParamHeap = NULL;
8621 return rc;
8622 }
8623
8624 //Map memory for parameters buffer
8625 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8626 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8627 mParamHeap->getFd(0),
8628 sizeof(metadata_buffer_t),
8629 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8630 if(rc < 0) {
8631 LOGE("failed to map SETPARM buffer");
8632 rc = FAILED_TRANSACTION;
8633 mParamHeap->deallocate();
8634 delete mParamHeap;
8635 mParamHeap = NULL;
8636 return rc;
8637 }
8638
8639 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8640
8641 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8642 return rc;
8643}
8644
8645/*===========================================================================
8646 * FUNCTION : deinitParameters
8647 *
8648 * DESCRIPTION: de-initialize camera parameters
8649 *
8650 * PARAMETERS :
8651 *
8652 * RETURN : NONE
8653 *==========================================================================*/
8654void QCamera3HardwareInterface::deinitParameters()
8655{
8656 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8657 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8658
8659 mParamHeap->deallocate();
8660 delete mParamHeap;
8661 mParamHeap = NULL;
8662
8663 mParameters = NULL;
8664
8665 free(mPrevParameters);
8666 mPrevParameters = NULL;
8667}
8668
8669/*===========================================================================
8670 * FUNCTION : calcMaxJpegSize
8671 *
8672 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8673 *
8674 * PARAMETERS :
8675 *   @camera_id : camera Id
8676 * RETURN : max_jpeg_size
8677 *==========================================================================*/
8678size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8679{
8680 size_t max_jpeg_size = 0;
8681 size_t temp_width, temp_height;
8682 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8683 MAX_SIZES_CNT);
8684 for (size_t i = 0; i < count; i++) {
8685 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8686 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8687 if (temp_width * temp_height > max_jpeg_size ) {
8688 max_jpeg_size = temp_width * temp_height;
8689 }
8690 }
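    // Size the JPEG buffer at 1.5x the largest pixel count (a heuristic upper
    // bound on the encoded size) plus the blob transport header.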
8691 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8692 return max_jpeg_size;
8693}
8694
8695/*===========================================================================
8696 * FUNCTION : getMaxRawSize
8697 *
8698 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8699 *
8700 * PARAMETERS :
8701 *   @camera_id : camera Id
8702 * RETURN : Largest supported Raw Dimension
8703 *==========================================================================*/
8704cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8705{
8706 int max_width = 0;
8707 cam_dimension_t maxRawSize;
8708
8709 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8710 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8711 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8712 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8713 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8714 }
8715 }
8716 return maxRawSize;
8717}
8718
8719
8720/*===========================================================================
8721 * FUNCTION : calcMaxJpegDim
8722 *
8723 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8724 *
8725 * PARAMETERS :
8726 *
8727 * RETURN : max_jpeg_dim
8728 *==========================================================================*/
8729cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8730{
8731 cam_dimension_t max_jpeg_dim;
8732 cam_dimension_t curr_jpeg_dim;
8733 max_jpeg_dim.width = 0;
8734 max_jpeg_dim.height = 0;
8735 curr_jpeg_dim.width = 0;
8736 curr_jpeg_dim.height = 0;
8737 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8738 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8739 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8740 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8741 max_jpeg_dim.width * max_jpeg_dim.height ) {
8742 max_jpeg_dim.width = curr_jpeg_dim.width;
8743 max_jpeg_dim.height = curr_jpeg_dim.height;
8744 }
8745 }
8746 return max_jpeg_dim;
8747}
8748
8749/*===========================================================================
8750 * FUNCTION : addStreamConfig
8751 *
8752 * DESCRIPTION: adds the stream configuration to the array
8753 *
8754 * PARAMETERS :
8755 * @available_stream_configs : pointer to stream configuration array
8756 * @scalar_format : scalar format
8757 * @dim : configuration dimension
8758 * @config_type : input or output configuration type
8759 *
8760 * RETURN : NONE
8761 *==========================================================================*/
8762void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8763 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8764{
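    // Each configuration is appended as a flat (format, width, height, type) tuple.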
8765 available_stream_configs.add(scalar_format);
8766 available_stream_configs.add(dim.width);
8767 available_stream_configs.add(dim.height);
8768 available_stream_configs.add(config_type);
8769}
8770
8771/*===========================================================================
8772 * FUNCTION   : supportBurstCapture
8773 *
8774 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8775 *
8776 * PARAMETERS :
8777 * @cameraId : camera Id
8778 *
8779 * RETURN : true if camera supports BURST_CAPTURE
8780 * false otherwise
8781 *==========================================================================*/
8782bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8783{
8784 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8785 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8786 const int32_t highResWidth = 3264;
8787 const int32_t highResHeight = 2448;
8788
8789 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8790 // Maximum resolution images cannot be captured at >= 10fps
8791 // -> not supporting BURST_CAPTURE
8792 return false;
8793 }
8794
8795 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8796 // Maximum resolution images can be captured at >= 20fps
8797 // --> supporting BURST_CAPTURE
8798 return true;
8799 }
8800
8801 // Find the smallest highRes resolution, or largest resolution if there is none
8802 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8803 MAX_SIZES_CNT);
8804 size_t highRes = 0;
8805 while ((highRes + 1 < totalCnt) &&
8806 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8807 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8808 highResWidth * highResHeight)) {
8809 highRes++;
8810 }
8811 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8812 return true;
8813 } else {
8814 return false;
8815 }
8816}
8817
8818/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008819 * FUNCTION : getPDStatIndex
8820 *
8821 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8822 *
8823 * PARAMETERS :
8824 * @caps : camera capabilities
8825 *
8826 * RETURN : int32_t type
8827 * non-negative - on success
8828 * -1 - on failure
8829 *==========================================================================*/
8830int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8831 if (nullptr == caps) {
8832 return -1;
8833 }
8834
8835 uint32_t metaRawCount = caps->meta_raw_channel_count;
8836 int32_t ret = -1;
8837 for (size_t i = 0; i < metaRawCount; i++) {
8838 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8839 ret = i;
8840 break;
8841 }
8842 }
8843
8844 return ret;
8845}
8846
8847/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008848 * FUNCTION : initStaticMetadata
8849 *
8850 * DESCRIPTION: initialize the static metadata
8851 *
8852 * PARAMETERS :
8853 * @cameraId : camera Id
8854 *
8855 * RETURN : int32_t type of status
8856 * 0 -- success
8857 * non-zero failure code
8858 *==========================================================================*/
8859int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8860{
8861 int rc = 0;
8862 CameraMetadata staticInfo;
8863 size_t count = 0;
8864 bool limitedDevice = false;
8865 char prop[PROPERTY_VALUE_MAX];
8866 bool supportBurst = false;
8867
8868 supportBurst = supportBurstCapture(cameraId);
8869
8870    /* If the sensor is a YUV sensor (no raw support), or per-frame control is not
8871     * guaranteed, or the min fps at max resolution is less than 20 fps, it is
8872     * advertised as a limited device */
8873 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8874 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8875 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8876 !supportBurst;
8877
8878 uint8_t supportedHwLvl = limitedDevice ?
8879 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008880#ifndef USE_HAL_3_3
8881 // LEVEL_3 - This device will support level 3.
8882 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8883#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008884 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008885#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008886
8887 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8888 &supportedHwLvl, 1);
8889
8890 bool facingBack = false;
8891 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8892 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8893 facingBack = true;
8894 }
8895 /*HAL 3 only*/
8896 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8897 &gCamCapability[cameraId]->min_focus_distance, 1);
8898
8899 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8900 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8901
8902 /*should be using focal lengths but sensor doesn't provide that info now*/
8903 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8904 &gCamCapability[cameraId]->focal_length,
8905 1);
8906
8907 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8908 gCamCapability[cameraId]->apertures,
8909 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8910
8911 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8912 gCamCapability[cameraId]->filter_densities,
8913 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8914
8915
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008916 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8917 size_t mode_count =
8918 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8919 for (size_t i = 0; i < mode_count; i++) {
8920 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8921 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008922 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008923 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008924
8925 int32_t lens_shading_map_size[] = {
8926 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8927 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8928 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8929 lens_shading_map_size,
8930 sizeof(lens_shading_map_size)/sizeof(int32_t));
8931
8932 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8933 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8934
8935 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8936 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8937
8938 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8939 &gCamCapability[cameraId]->max_frame_duration, 1);
8940
8941 camera_metadata_rational baseGainFactor = {
8942 gCamCapability[cameraId]->base_gain_factor.numerator,
8943 gCamCapability[cameraId]->base_gain_factor.denominator};
8944 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8945 &baseGainFactor, 1);
8946
8947 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8948 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8949
8950 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8951 gCamCapability[cameraId]->pixel_array_size.height};
8952 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8953 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8954
8955 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8956 gCamCapability[cameraId]->active_array_size.top,
8957 gCamCapability[cameraId]->active_array_size.width,
8958 gCamCapability[cameraId]->active_array_size.height};
8959 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8960 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8961
8962 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8963 &gCamCapability[cameraId]->white_level, 1);
8964
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008965 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8966 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8967 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008968 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008969 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008970
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008971#ifndef USE_HAL_3_3
8972 bool hasBlackRegions = false;
8973 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8974 LOGW("black_region_count: %d is bounded to %d",
8975 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8976 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8977 }
8978 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8979 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8980 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8981 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8982 }
8983 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8984 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8985 hasBlackRegions = true;
8986 }
8987#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008988 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8989 &gCamCapability[cameraId]->flash_charge_duration, 1);
8990
8991 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8992 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8993
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008994 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8995 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8996 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008997 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8998 &timestampSource, 1);
8999
Thierry Strudel54dc9782017-02-15 12:12:10 -08009000 //update histogram vendor data
9001 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009002 &gCamCapability[cameraId]->histogram_size, 1);
9003
Thierry Strudel54dc9782017-02-15 12:12:10 -08009004 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009005 &gCamCapability[cameraId]->max_histogram_count, 1);
9006
Shuzhen Wang14415f52016-11-16 18:26:18 -08009007 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9008 //so that app can request fewer number of bins than the maximum supported.
9009 std::vector<int32_t> histBins;
9010 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9011 histBins.push_back(maxHistBins);
9012 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9013 (maxHistBins & 0x1) == 0) {
9014 histBins.push_back(maxHistBins >> 1);
9015 maxHistBins >>= 1;
9016 }
9017 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9018 histBins.data(), histBins.size());
9019
Thierry Strudel3d639192016-09-09 11:52:26 -07009020 int32_t sharpness_map_size[] = {
9021 gCamCapability[cameraId]->sharpness_map_size.width,
9022 gCamCapability[cameraId]->sharpness_map_size.height};
9023
9024 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9025 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9026
9027 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9028 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9029
Emilian Peev0f3c3162017-03-15 12:57:46 +00009030 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9031 if (0 <= indexPD) {
9032 // Advertise PD stats data as part of the Depth capabilities
9033 int32_t depthWidth =
9034 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9035 int32_t depthHeight =
9036 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
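        // The PD stats buffer holds depthWidth * depthHeight 16-bit samples;
        // dividing by 16 (assumed bytes per reported depth sample) gives the
        // maximum number of depth samples advertised below.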
9037 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9038 assert(0 < depthSamplesCount);
9039 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9040 &depthSamplesCount, 1);
9041
9042 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9043 depthHeight,
9044 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9045 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9046 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9047 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9048 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9049
9050 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9051 depthHeight, 33333333,
9052 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9053 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9054 depthMinDuration,
9055 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9056
9057 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9058 depthHeight, 0,
9059 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9060 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9061 depthStallDuration,
9062 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9063
9064 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9065 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9066 }
9067
Thierry Strudel3d639192016-09-09 11:52:26 -07009068 int32_t scalar_formats[] = {
9069 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9070 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9071 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9072 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9073 HAL_PIXEL_FORMAT_RAW10,
9074 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009075 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9076 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9077 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009078
9079 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9080 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9081 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9082 count, MAX_SIZES_CNT, available_processed_sizes);
9083 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9084 available_processed_sizes, count * 2);
9085
9086 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9087 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9088 makeTable(gCamCapability[cameraId]->raw_dim,
9089 count, MAX_SIZES_CNT, available_raw_sizes);
9090 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9091 available_raw_sizes, count * 2);
9092
9093 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9094 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9095 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9096 count, MAX_SIZES_CNT, available_fps_ranges);
9097 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9098 available_fps_ranges, count * 2);
9099
9100 camera_metadata_rational exposureCompensationStep = {
9101 gCamCapability[cameraId]->exp_compensation_step.numerator,
9102 gCamCapability[cameraId]->exp_compensation_step.denominator};
9103 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9104 &exposureCompensationStep, 1);
9105
9106 Vector<uint8_t> availableVstabModes;
9107 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9108 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009109 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009110 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009111 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009112 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009113 count = IS_TYPE_MAX;
9114 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9115 for (size_t i = 0; i < count; i++) {
9116 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9117 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9118 eisSupported = true;
9119 break;
9120 }
9121 }
9122 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009123 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9124 }
9125 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9126 availableVstabModes.array(), availableVstabModes.size());
9127
9128 /*HAL 1 and HAL 3 common*/
9129 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9130 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9131 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
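    // Zoom table entries are scaled by 100 (1.0x == 100), so the maximum digital
    // zoom is the last table entry divided by the minimum step.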
9132    float maxZoom = (float)maxZoomStep / minZoomStep;
9133 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9134 &maxZoom, 1);
9135
9136 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9137 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9138
9139 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9140 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9141 max3aRegions[2] = 0; /* AF not supported */
9142 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9143 max3aRegions, 3);
9144
9145 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9146 memset(prop, 0, sizeof(prop));
9147 property_get("persist.camera.facedetect", prop, "1");
9148 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9149 LOGD("Support face detection mode: %d",
9150 supportedFaceDetectMode);
9151
9152 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009153    /* face detect mode should be OFF if the max number of faces is 0 */
9154 if (maxFaces <= 0) {
9155 supportedFaceDetectMode = 0;
9156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009157 Vector<uint8_t> availableFaceDetectModes;
9158 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9159 if (supportedFaceDetectMode == 1) {
9160 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9161 } else if (supportedFaceDetectMode == 2) {
9162 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9163 } else if (supportedFaceDetectMode == 3) {
9164 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9165 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9166 } else {
9167 maxFaces = 0;
9168 }
9169 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9170 availableFaceDetectModes.array(),
9171 availableFaceDetectModes.size());
9172 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9173 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009174 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9175 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9176 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009177
9178 int32_t exposureCompensationRange[] = {
9179 gCamCapability[cameraId]->exposure_compensation_min,
9180 gCamCapability[cameraId]->exposure_compensation_max};
9181 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9182 exposureCompensationRange,
9183 sizeof(exposureCompensationRange)/sizeof(int32_t));
9184
9185 uint8_t lensFacing = (facingBack) ?
9186 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9187 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9188
9189 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9190 available_thumbnail_sizes,
9191 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9192
9193 /*all sizes will be clubbed into this tag*/
9194 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9195 /*android.scaler.availableStreamConfigurations*/
9196 Vector<int32_t> available_stream_configs;
9197 cam_dimension_t active_array_dim;
9198 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9199 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009200
9201    /* Advertise the list of supported input dimensions based on the property below.
9202    By default all sizes up to 5MP will be advertised.
9203    Note that the setprop resolution format should be WxH.
9204    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9205    To list all supported sizes, set the property to "0x0" */
9206 cam_dimension_t minInputSize = {2592,1944}; //5MP
9207 memset(prop, 0, sizeof(prop));
9208 property_get("persist.camera.input.minsize", prop, "2592x1944");
9209 if (strlen(prop) > 0) {
9210 char *saveptr = NULL;
9211 char *token = strtok_r(prop, "x", &saveptr);
9212 if (token != NULL) {
9213 minInputSize.width = atoi(token);
9214 }
9215 token = strtok_r(NULL, "x", &saveptr);
9216 if (token != NULL) {
9217 minInputSize.height = atoi(token);
9218 }
9219 }
9220
Thierry Strudel3d639192016-09-09 11:52:26 -07009221    /* Add input/output stream configurations for each scalar format */
9222 for (size_t j = 0; j < scalar_formats_count; j++) {
9223 switch (scalar_formats[j]) {
9224 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9225 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9226 case HAL_PIXEL_FORMAT_RAW10:
9227 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9228 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9229 addStreamConfig(available_stream_configs, scalar_formats[j],
9230 gCamCapability[cameraId]->raw_dim[i],
9231 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9232 }
9233 break;
9234 case HAL_PIXEL_FORMAT_BLOB:
9235 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9236 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9237 addStreamConfig(available_stream_configs, scalar_formats[j],
9238 gCamCapability[cameraId]->picture_sizes_tbl[i],
9239 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9240 }
9241 break;
9242 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9243 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9244 default:
9245 cam_dimension_t largest_picture_size;
9246 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9247 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9248 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9249 addStreamConfig(available_stream_configs, scalar_formats[j],
9250 gCamCapability[cameraId]->picture_sizes_tbl[i],
9251 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009252            /* For the two formats below we also support input streams for reprocessing; advertise those */
9253 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9254 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9255 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9256 >= minInputSize.width) || (gCamCapability[cameraId]->
9257 picture_sizes_tbl[i].height >= minInputSize.height)) {
9258 addStreamConfig(available_stream_configs, scalar_formats[j],
9259 gCamCapability[cameraId]->picture_sizes_tbl[i],
9260 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9261 }
9262 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009263 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009264
Thierry Strudel3d639192016-09-09 11:52:26 -07009265 break;
9266 }
9267 }
9268
9269 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9270 available_stream_configs.array(), available_stream_configs.size());
9271 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9272 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9273
9274 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9275 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9276
9277 /* android.scaler.availableMinFrameDurations */
9278 Vector<int64_t> available_min_durations;
9279 for (size_t j = 0; j < scalar_formats_count; j++) {
9280 switch (scalar_formats[j]) {
9281 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9282 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9283 case HAL_PIXEL_FORMAT_RAW10:
9284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9285 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9286 available_min_durations.add(scalar_formats[j]);
9287 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9288 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9289 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9290 }
9291 break;
9292 default:
9293 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9294 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9295 available_min_durations.add(scalar_formats[j]);
9296 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9297 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9298 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9299 }
9300 break;
9301 }
9302 }
9303 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9304 available_min_durations.array(), available_min_durations.size());
9305
9306 Vector<int32_t> available_hfr_configs;
9307 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9308 int32_t fps = 0;
9309 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9310 case CAM_HFR_MODE_60FPS:
9311 fps = 60;
9312 break;
9313 case CAM_HFR_MODE_90FPS:
9314 fps = 90;
9315 break;
9316 case CAM_HFR_MODE_120FPS:
9317 fps = 120;
9318 break;
9319 case CAM_HFR_MODE_150FPS:
9320 fps = 150;
9321 break;
9322 case CAM_HFR_MODE_180FPS:
9323 fps = 180;
9324 break;
9325 case CAM_HFR_MODE_210FPS:
9326 fps = 210;
9327 break;
9328 case CAM_HFR_MODE_240FPS:
9329 fps = 240;
9330 break;
9331 case CAM_HFR_MODE_480FPS:
9332 fps = 480;
9333 break;
9334 case CAM_HFR_MODE_OFF:
9335 case CAM_HFR_MODE_MAX:
9336 default:
9337 break;
9338 }
9339
9340 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9341 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9342 /* For each HFR frame rate, we need to advertise one variable fps range
9343 * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
9344 * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9345 * set by the app. When video recording is started, [120, 120] is
9346 * set. This way the sensor configuration does not change when recording
9347 * is started. */
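/* Worked example (hypothetical 1280x720 entry, and assuming PREVIEW_FPS_FOR_HFR is the
 * 30 fps preview rate the [30, 120] example above implies): a 240 fps table entry adds
 * (1280, 720, 30, 240, 8) and (1280, 720, 240, 240, 8), where the last value is the
 * max batch size, fps / PREVIEW_FPS_FOR_HFR. */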
9348
9349 /* (width, height, fps_min, fps_max, batch_size_max) */
9350 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9351 j < MAX_SIZES_CNT; j++) {
9352 available_hfr_configs.add(
9353 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9354 available_hfr_configs.add(
9355 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9356 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9357 available_hfr_configs.add(fps);
9358 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9359
9360 /* (width, height, fps_min, fps_max, batch_size_max) */
9361 available_hfr_configs.add(
9362 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9363 available_hfr_configs.add(
9364 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9365 available_hfr_configs.add(fps);
9366 available_hfr_configs.add(fps);
9367 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9368 }
9369 }
9370 }
9371 //Advertise HFR capability only if the property is set
9372 memset(prop, 0, sizeof(prop));
9373 property_get("persist.camera.hal3hfr.enable", prop, "1");
9374 uint8_t hfrEnable = (uint8_t)atoi(prop);
9375
9376 if(hfrEnable && available_hfr_configs.array()) {
9377 staticInfo.update(
9378 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9379 available_hfr_configs.array(), available_hfr_configs.size());
9380 }
9381
9382 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9383 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9384 &max_jpeg_size, 1);
9385
9386 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9387 size_t size = 0;
9388 count = CAM_EFFECT_MODE_MAX;
9389 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9390 for (size_t i = 0; i < count; i++) {
9391 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9392 gCamCapability[cameraId]->supported_effects[i]);
9393 if (NAME_NOT_FOUND != val) {
9394 avail_effects[size] = (uint8_t)val;
9395 size++;
9396 }
9397 }
9398 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9399 avail_effects,
9400 size);
9401
9402 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9403 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9404 size_t supported_scene_modes_cnt = 0;
9405 count = CAM_SCENE_MODE_MAX;
9406 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9407 for (size_t i = 0; i < count; i++) {
9408 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9409 CAM_SCENE_MODE_OFF) {
9410 int val = lookupFwkName(SCENE_MODES_MAP,
9411 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9412 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009413
Thierry Strudel3d639192016-09-09 11:52:26 -07009414 if (NAME_NOT_FOUND != val) {
9415 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9416 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9417 supported_scene_modes_cnt++;
9418 }
9419 }
9420 }
9421 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9422 avail_scene_modes,
9423 supported_scene_modes_cnt);
9424
9425 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9426 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9427 supported_scene_modes_cnt,
9428 CAM_SCENE_MODE_MAX,
9429 scene_mode_overrides,
9430 supported_indexes,
9431 cameraId);
9432
9433 if (supported_scene_modes_cnt == 0) {
9434 supported_scene_modes_cnt = 1;
9435 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9436 }
9437
9438 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9439 scene_mode_overrides, supported_scene_modes_cnt * 3);
9440
9441 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9442 ANDROID_CONTROL_MODE_AUTO,
9443 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9444 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9445 available_control_modes,
9446 3);
9447
9448 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9449 size = 0;
9450 count = CAM_ANTIBANDING_MODE_MAX;
9451 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9452 for (size_t i = 0; i < count; i++) {
9453 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9454 gCamCapability[cameraId]->supported_antibandings[i]);
9455 if (NAME_NOT_FOUND != val) {
9456 avail_antibanding_modes[size] = (uint8_t)val;
9457 size++;
9458 }
9459
9460 }
9461 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9462 avail_antibanding_modes,
9463 size);
9464
9465 uint8_t avail_abberation_modes[] = {
9466 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9467 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9468 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9469 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9470 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9471 if (0 == count) {
9472 // If no aberration correction modes are available for a device, advertise only the OFF mode
9473 size = 1;
9474 } else {
9475 // If count is not zero, then at least one of the FAST or HIGH_QUALITY modes is
9476 // supported, so advertise all 3 modes if at least one mode is supported, as per
9477 // the new M requirement
9478 size = 3;
9479 }
9480 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9481 avail_abberation_modes,
9482 size);
9483
9484 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9485 size = 0;
9486 count = CAM_FOCUS_MODE_MAX;
9487 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9488 for (size_t i = 0; i < count; i++) {
9489 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9490 gCamCapability[cameraId]->supported_focus_modes[i]);
9491 if (NAME_NOT_FOUND != val) {
9492 avail_af_modes[size] = (uint8_t)val;
9493 size++;
9494 }
9495 }
9496 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9497 avail_af_modes,
9498 size);
9499
9500 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9501 size = 0;
9502 count = CAM_WB_MODE_MAX;
9503 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9504 for (size_t i = 0; i < count; i++) {
9505 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9506 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9507 gCamCapability[cameraId]->supported_white_balances[i]);
9508 if (NAME_NOT_FOUND != val) {
9509 avail_awb_modes[size] = (uint8_t)val;
9510 size++;
9511 }
9512 }
9513 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9514 avail_awb_modes,
9515 size);
9516
9517 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9518 count = CAM_FLASH_FIRING_LEVEL_MAX;
9519 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9520 count);
9521 for (size_t i = 0; i < count; i++) {
9522 available_flash_levels[i] =
9523 gCamCapability[cameraId]->supported_firing_levels[i];
9524 }
9525 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9526 available_flash_levels, count);
9527
9528 uint8_t flashAvailable;
9529 if (gCamCapability[cameraId]->flash_available)
9530 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9531 else
9532 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9533 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9534 &flashAvailable, 1);
9535
9536 Vector<uint8_t> avail_ae_modes;
9537 count = CAM_AE_MODE_MAX;
9538 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9539 for (size_t i = 0; i < count; i++) {
9540 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9541 }
9542 if (flashAvailable) {
9543 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9544 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009545 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009546 }
9547 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9548 avail_ae_modes.array(),
9549 avail_ae_modes.size());
9550
9551 int32_t sensitivity_range[2];
9552 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9553 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9554 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9555 sensitivity_range,
9556 sizeof(sensitivity_range) / sizeof(int32_t));
9557
9558 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9559 &gCamCapability[cameraId]->max_analog_sensitivity,
9560 1);
9561
9562 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9563 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9564 &sensor_orientation,
9565 1);
9566
9567 int32_t max_output_streams[] = {
9568 MAX_STALLING_STREAMS,
9569 MAX_PROCESSED_STREAMS,
9570 MAX_RAW_STREAMS};
9571 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9572 max_output_streams,
9573 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9574
9575 uint8_t avail_leds = 0;
9576 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9577 &avail_leds, 0);
9578
9579 uint8_t focus_dist_calibrated;
9580 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9581 gCamCapability[cameraId]->focus_dist_calibrated);
9582 if (NAME_NOT_FOUND != val) {
9583 focus_dist_calibrated = (uint8_t)val;
9584 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9585 &focus_dist_calibrated, 1);
9586 }
9587
9588 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9589 size = 0;
9590 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9591 MAX_TEST_PATTERN_CNT);
9592 for (size_t i = 0; i < count; i++) {
9593 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9594 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9595 if (NAME_NOT_FOUND != testpatternMode) {
9596 avail_testpattern_modes[size] = testpatternMode;
9597 size++;
9598 }
9599 }
9600 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9601 avail_testpattern_modes,
9602 size);
9603
9604 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9605 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9606 &max_pipeline_depth,
9607 1);
9608
9609 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9610 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9611 &partial_result_count,
9612 1);
9613
9614 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9615 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9616
9617 Vector<uint8_t> available_capabilities;
9618 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9619 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9620 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9621 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9622 if (supportBurst) {
9623 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9624 }
9625 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9626 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9627 if (hfrEnable && available_hfr_configs.array()) {
9628 available_capabilities.add(
9629 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9630 }
9631
9632 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9633 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9634 }
9635 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9636 available_capabilities.array(),
9637 available_capabilities.size());
9638
9639 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9640 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9641 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9642 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9643
9644 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9645 &aeLockAvailable, 1);
9646
9647 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9648 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9649 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9650 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9651
9652 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9653 &awbLockAvailable, 1);
9654
9655 int32_t max_input_streams = 1;
9656 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9657 &max_input_streams,
9658 1);
9659
9660 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
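/* Decoded, the map below states that an IMPLEMENTATION_DEFINED input can be reprocessed
 * into BLOB or YCbCr_420_888, and a YCbCr_420_888 input into BLOB or YCbCr_420_888. */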
9661 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9662 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9663 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9664 HAL_PIXEL_FORMAT_YCbCr_420_888};
9665 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9666 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9667
9668 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9669 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9670 &max_latency,
9671 1);
9672
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009673#ifndef USE_HAL_3_3
9674 int32_t isp_sensitivity_range[2];
9675 isp_sensitivity_range[0] =
9676 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9677 isp_sensitivity_range[1] =
9678 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9679 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9680 isp_sensitivity_range,
9681 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9682#endif
9683
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9685 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9686 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9687 available_hot_pixel_modes,
9688 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9689
9690 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9691 ANDROID_SHADING_MODE_FAST,
9692 ANDROID_SHADING_MODE_HIGH_QUALITY};
9693 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9694 available_shading_modes,
9695 3);
9696
9697 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9698 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9699 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9700 available_lens_shading_map_modes,
9701 2);
9702
9703 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9704 ANDROID_EDGE_MODE_FAST,
9705 ANDROID_EDGE_MODE_HIGH_QUALITY,
9706 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9707 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9708 available_edge_modes,
9709 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9710
9711 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9712 ANDROID_NOISE_REDUCTION_MODE_FAST,
9713 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9714 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9715 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9716 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9717 available_noise_red_modes,
9718 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9719
9720 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9721 ANDROID_TONEMAP_MODE_FAST,
9722 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9723 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9724 available_tonemap_modes,
9725 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9726
9727 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9728 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9729 available_hot_pixel_map_modes,
9730 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9731
9732 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9733 gCamCapability[cameraId]->reference_illuminant1);
9734 if (NAME_NOT_FOUND != val) {
9735 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9736 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9737 }
9738
9739 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9740 gCamCapability[cameraId]->reference_illuminant2);
9741 if (NAME_NOT_FOUND != val) {
9742 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9743 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9744 }
9745
9746 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9747 (void *)gCamCapability[cameraId]->forward_matrix1,
9748 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9749
9750 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9751 (void *)gCamCapability[cameraId]->forward_matrix2,
9752 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9753
9754 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9755 (void *)gCamCapability[cameraId]->color_transform1,
9756 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9757
9758 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9759 (void *)gCamCapability[cameraId]->color_transform2,
9760 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9761
9762 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9763 (void *)gCamCapability[cameraId]->calibration_transform1,
9764 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9765
9766 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9767 (void *)gCamCapability[cameraId]->calibration_transform2,
9768 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9769
9770 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9771 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9772 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9773 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9774 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9775 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9776 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9777 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9778 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9779 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9780 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9781 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9782 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9783 ANDROID_JPEG_GPS_COORDINATES,
9784 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9785 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9786 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9787 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9788 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9789 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9790 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9791 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9792 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9793 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009794#ifndef USE_HAL_3_3
9795 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9796#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009797 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009798 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009799 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9800 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009801 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009802 /* DevCamDebug metadata request_keys_basic */
9803 DEVCAMDEBUG_META_ENABLE,
9804 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009805 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9806 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009807 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009808
9809 size_t request_keys_cnt =
9810 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9811 Vector<int32_t> available_request_keys;
9812 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9813 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9814 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9815 }
9816
9817 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9818 available_request_keys.array(), available_request_keys.size());
9819
9820 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9821 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9822 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9823 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9824 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9825 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9826 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9827 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9828 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9829 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9830 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9831 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9832 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9833 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9834 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9835 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9836 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009837 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009838 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9839 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9840 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009841 ANDROID_STATISTICS_FACE_SCORES,
9842#ifndef USE_HAL_3_3
9843 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9844#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009845 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009846 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009847 // DevCamDebug metadata result_keys_basic
9848 DEVCAMDEBUG_META_ENABLE,
9849 // DevCamDebug metadata result_keys AF
9850 DEVCAMDEBUG_AF_LENS_POSITION,
9851 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9852 DEVCAMDEBUG_AF_TOF_DISTANCE,
9853 DEVCAMDEBUG_AF_LUMA,
9854 DEVCAMDEBUG_AF_HAF_STATE,
9855 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9856 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9857 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9858 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9859 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9860 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9861 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9862 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9863 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9864 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9865 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9866 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9867 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9868 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9869 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9870 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9871 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9872 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9873 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9874 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9875 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9876 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9877 // DevCamDebug metadata result_keys AEC
9878 DEVCAMDEBUG_AEC_TARGET_LUMA,
9879 DEVCAMDEBUG_AEC_COMP_LUMA,
9880 DEVCAMDEBUG_AEC_AVG_LUMA,
9881 DEVCAMDEBUG_AEC_CUR_LUMA,
9882 DEVCAMDEBUG_AEC_LINECOUNT,
9883 DEVCAMDEBUG_AEC_REAL_GAIN,
9884 DEVCAMDEBUG_AEC_EXP_INDEX,
9885 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009886 // DevCamDebug metadata result_keys zzHDR
9887 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9888 DEVCAMDEBUG_AEC_L_LINECOUNT,
9889 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9890 DEVCAMDEBUG_AEC_S_LINECOUNT,
9891 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9892 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9893 // DevCamDebug metadata result_keys ADRC
9894 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9895 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9896 DEVCAMDEBUG_AEC_GTM_RATIO,
9897 DEVCAMDEBUG_AEC_LTM_RATIO,
9898 DEVCAMDEBUG_AEC_LA_RATIO,
9899 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009900 // DevCamDebug metadata result_keys AWB
9901 DEVCAMDEBUG_AWB_R_GAIN,
9902 DEVCAMDEBUG_AWB_G_GAIN,
9903 DEVCAMDEBUG_AWB_B_GAIN,
9904 DEVCAMDEBUG_AWB_CCT,
9905 DEVCAMDEBUG_AWB_DECISION,
9906 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009907 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9908 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9909 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009910 };
9911
Thierry Strudel3d639192016-09-09 11:52:26 -07009912 size_t result_keys_cnt =
9913 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9914
9915 Vector<int32_t> available_result_keys;
9916 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9917 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9918 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9919 }
9920 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9921 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9922 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9923 }
9924 if (supportedFaceDetectMode == 1) {
9925 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9926 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9927 } else if ((supportedFaceDetectMode == 2) ||
9928 (supportedFaceDetectMode == 3)) {
9929 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9930 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9931 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009932#ifndef USE_HAL_3_3
9933 if (hasBlackRegions) {
9934 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9935 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9936 }
9937#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009938 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9939 available_result_keys.array(), available_result_keys.size());
9940
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009941 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009942 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9943 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9944 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9945 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9946 ANDROID_SCALER_CROPPING_TYPE,
9947 ANDROID_SYNC_MAX_LATENCY,
9948 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9949 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9950 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9951 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9952 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9953 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9954 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9955 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9956 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9957 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9958 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9959 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9960 ANDROID_LENS_FACING,
9961 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9962 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9963 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9964 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9965 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9966 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9967 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9968 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9969 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9970 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9971 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9972 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9973 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9974 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9975 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9976 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9977 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9978 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9979 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9980 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009981 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009982 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9983 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9984 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9985 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9986 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9987 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9988 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9989 ANDROID_CONTROL_AVAILABLE_MODES,
9990 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9991 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9992 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9993 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009994 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9995#ifndef USE_HAL_3_3
9996 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9997 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9998#endif
9999 };
10000
10001 Vector<int32_t> available_characteristics_keys;
10002 available_characteristics_keys.appendArray(characteristics_keys_basic,
10003 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10004#ifndef USE_HAL_3_3
10005 if (hasBlackRegions) {
10006 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10007 }
10008#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010009
10010 if (0 <= indexPD) {
10011 int32_t depthKeys[] = {
10012 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10013 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10014 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10015 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10016 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10017 };
10018 available_characteristics_keys.appendArray(depthKeys,
10019 sizeof(depthKeys) / sizeof(depthKeys[0]));
10020 }
10021
Thierry Strudel3d639192016-09-09 11:52:26 -070010022 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010023 available_characteristics_keys.array(),
10024 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010025
10026 /* Available stall durations depend on the HW + SW and will be different for different devices */
10027 /* Have to add entries for raw after implementation */
10028 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10029 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10030
10031 Vector<int64_t> available_stall_durations;
10032 for (uint32_t j = 0; j < stall_formats_count; j++) {
10033 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10034 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10035 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10036 available_stall_durations.add(stall_formats[j]);
10037 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10038 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10039 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10040 }
10041 } else {
10042 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10043 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10044 available_stall_durations.add(stall_formats[j]);
10045 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10046 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10047 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10048 }
10049 }
10050 }
10051 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10052 available_stall_durations.array(),
10053 available_stall_durations.size());
10054
10055 //QCAMERA3_OPAQUE_RAW
10056 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10057 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10058 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10059 case LEGACY_RAW:
10060 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10061 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10062 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10063 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10064 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10065 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10066 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10067 break;
10068 case MIPI_RAW:
10069 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10070 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10071 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10072 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10073 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10074 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10075 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10076 break;
10077 default:
10078 LOGE("unknown opaque_raw_format %d",
10079 gCamCapability[cameraId]->opaque_raw_fmt);
10080 break;
10081 }
10082 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10083
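/* QCAMERA3_OPAQUE_RAW_STRIDES is filled as (width, height, stride) triplets, taking the
 * stride of plane 0 as computed by mm_stream_calc_offset_raw() for the chosen raw format. */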
10084 Vector<int32_t> strides;
10085 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10086 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10087 cam_stream_buf_plane_info_t buf_planes;
10088 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10089 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10090 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10091 &gCamCapability[cameraId]->padding_info, &buf_planes);
10092 strides.add(buf_planes.plane_info.mp[0].stride);
10093 }
10094 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10095 strides.size());
10096
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010097 //TBD: remove the following line once backend advertises zzHDR in feature mask
10098 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010099 //Video HDR default
10100 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10101 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010102 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010103 int32_t vhdr_mode[] = {
10104 QCAMERA3_VIDEO_HDR_MODE_OFF,
10105 QCAMERA3_VIDEO_HDR_MODE_ON};
10106
10107 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10108 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10109 vhdr_mode, vhdr_mode_count);
10110 }
10111
Thierry Strudel3d639192016-09-09 11:52:26 -070010112 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10113 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10114 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10115
10116 uint8_t isMonoOnly =
10117 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10118 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10119 &isMonoOnly, 1);
10120
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010121#ifndef USE_HAL_3_3
10122 Vector<int32_t> opaque_size;
10123 for (size_t j = 0; j < scalar_formats_count; j++) {
10124 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10125 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10126 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10127 cam_stream_buf_plane_info_t buf_planes;
10128
10129 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10130 &gCamCapability[cameraId]->padding_info, &buf_planes);
10131
10132 if (rc == 0) {
10133 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10134 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10135 opaque_size.add(buf_planes.plane_info.frame_len);
10136 } else {
10137 LOGE("raw frame calculation failed!");
10138 }
10139 }
10140 }
10141 }
10142
10143 if ((opaque_size.size() > 0) &&
10144 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10145 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10146 else
10147 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10148#endif
10149
Thierry Strudel04e026f2016-10-10 11:27:36 -070010150 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10151 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10152 size = 0;
10153 count = CAM_IR_MODE_MAX;
10154 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10155 for (size_t i = 0; i < count; i++) {
10156 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10157 gCamCapability[cameraId]->supported_ir_modes[i]);
10158 if (NAME_NOT_FOUND != val) {
10159 avail_ir_modes[size] = (int32_t)val;
10160 size++;
10161 }
10162 }
10163 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10164 avail_ir_modes, size);
10165 }
10166
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010167 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10168 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10169 size = 0;
10170 count = CAM_AEC_CONVERGENCE_MAX;
10171 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10172 for (size_t i = 0; i < count; i++) {
10173 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10174 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10175 if (NAME_NOT_FOUND != val) {
10176 available_instant_aec_modes[size] = (int32_t)val;
10177 size++;
10178 }
10179 }
10180 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10181 available_instant_aec_modes, size);
10182 }
10183
Thierry Strudel54dc9782017-02-15 12:12:10 -080010184 int32_t sharpness_range[] = {
10185 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10186 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10187 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10188
10189 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10190 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10191 size = 0;
10192 count = CAM_BINNING_CORRECTION_MODE_MAX;
10193 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10194 for (size_t i = 0; i < count; i++) {
10195 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10196 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10197 gCamCapability[cameraId]->supported_binning_modes[i]);
10198 if (NAME_NOT_FOUND != val) {
10199 avail_binning_modes[size] = (int32_t)val;
10200 size++;
10201 }
10202 }
10203 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10204 avail_binning_modes, size);
10205 }
10206
10207 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10208 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10209 size = 0;
10210 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10211 for (size_t i = 0; i < count; i++) {
10212 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10213 gCamCapability[cameraId]->supported_aec_modes[i]);
10214 if (NAME_NOT_FOUND != val)
10215 available_aec_modes[size++] = val;
10216 }
10217 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10218 available_aec_modes, size);
10219 }
10220
10221 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10222 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10223 size = 0;
10224 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10225 for (size_t i = 0; i < count; i++) {
10226 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10227 gCamCapability[cameraId]->supported_iso_modes[i]);
10228 if (NAME_NOT_FOUND != val)
10229 available_iso_modes[size++] = val;
10230 }
10231 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10232 available_iso_modes, size);
10233 }
10234
10235 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10236 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10237 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10238 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10239 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10240
10241 int32_t available_saturation_range[4];
10242 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10243 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10244 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10245 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10246 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10247 available_saturation_range, 4);
10248
10249 uint8_t is_hdr_values[2];
10250 is_hdr_values[0] = 0;
10251 is_hdr_values[1] = 1;
10252 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10253 is_hdr_values, 2);
10254
10255 float is_hdr_confidence_range[2];
10256 is_hdr_confidence_range[0] = 0.0;
10257 is_hdr_confidence_range[1] = 1.0;
10258 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10259 is_hdr_confidence_range, 2);
10260
Emilian Peev0a972ef2017-03-16 10:25:53 +000010261 size_t eepromLength = strnlen(
10262 reinterpret_cast<const char *>(
10263 gCamCapability[cameraId]->eeprom_version_info),
10264 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10265 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010266 char easelInfo[] = ",E:N";
10267 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10268 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10269 eepromLength += sizeof(easelInfo);
10270 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10271 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010272 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10273 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10274 }
10275
Thierry Strudel3d639192016-09-09 11:52:26 -070010276 gStaticMetadata[cameraId] = staticInfo.release();
10277 return rc;
10278}
10279
10280/*===========================================================================
10281 * FUNCTION : makeTable
10282 *
10283 * DESCRIPTION: make a table of sizes
10284 *
10285 * PARAMETERS :
10286 *
10287 *
10288 *==========================================================================*/
10289void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10290 size_t max_size, int32_t *sizeTable)
10291{
10292 size_t j = 0;
10293 if (size > max_size) {
10294 size = max_size;
10295 }
10296 for (size_t i = 0; i < size; i++) {
10297 sizeTable[j] = dimTable[i].width;
10298 sizeTable[j+1] = dimTable[i].height;
10299 j+=2;
10300 }
10301}
10302
10303/*===========================================================================
10304 * FUNCTION : makeFPSTable
10305 *
10306 * DESCRIPTION: make a table of fps ranges
10307 *
10308 * PARAMETERS :
10309 *
10310 *==========================================================================*/
10311void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10312 size_t max_size, int32_t *fpsRangesTable)
10313{
10314 size_t j = 0;
10315 if (size > max_size) {
10316 size = max_size;
10317 }
10318 for (size_t i = 0; i < size; i++) {
10319 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10320 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10321 j+=2;
10322 }
10323}
10324
10325/*===========================================================================
10326 * FUNCTION : makeOverridesList
10327 *
10328 * DESCRIPTION: make a list of scene mode overrides
10329 *
10330 * PARAMETERS :
10331 *
10332 *
10333 *==========================================================================*/
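/* The overrides list is written as one (ae_mode, awb_mode, af_mode) triplet per supported
 * scene mode, which is the layout expected for ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */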
10334void QCamera3HardwareInterface::makeOverridesList(
10335 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10336 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10337{
10338 /* The daemon gives a list of overrides for all scene modes.
10339 However, we should send the framework only the overrides for the scene modes
10340 it supports. */
10341 size_t j = 0;
10342 if (size > max_size) {
10343 size = max_size;
10344 }
10345 size_t focus_count = CAM_FOCUS_MODE_MAX;
10346 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10347 focus_count);
10348 for (size_t i = 0; i < size; i++) {
10349 bool supt = false;
10350 size_t index = supported_indexes[i];
10351 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10352 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10353 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10354 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10355 overridesTable[index].awb_mode);
10356 if (NAME_NOT_FOUND != val) {
10357 overridesList[j+1] = (uint8_t)val;
10358 }
10359 uint8_t focus_override = overridesTable[index].af_mode;
10360 for (size_t k = 0; k < focus_count; k++) {
10361 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10362 supt = true;
10363 break;
10364 }
10365 }
10366 if (supt) {
10367 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10368 focus_override);
10369 if (NAME_NOT_FOUND != val) {
10370 overridesList[j+2] = (uint8_t)val;
10371 }
10372 } else {
10373 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10374 }
10375 j+=3;
10376 }
10377}
10378
10379/*===========================================================================
10380 * FUNCTION : filterJpegSizes
10381 *
10382 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10383 * could be downscaled to
10384 *
10385 * PARAMETERS :
10386 *
10387 * RETURN : length of jpegSizes array
10388 *==========================================================================*/
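/* For illustration (hypothetical numbers): with a 4000x3000 active array and a downscale
 * factor of 4, only processed sizes of at least 1000x750 survive the filter below. */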
10389
10390size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10391 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10392 uint8_t downscale_factor)
10393{
10394 if (0 == downscale_factor) {
10395 downscale_factor = 1;
10396 }
10397
10398 int32_t min_width = active_array_size.width / downscale_factor;
10399 int32_t min_height = active_array_size.height / downscale_factor;
10400 size_t jpegSizesCnt = 0;
10401 if (processedSizesCnt > maxCount) {
10402 processedSizesCnt = maxCount;
10403 }
10404 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10405 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10406 jpegSizes[jpegSizesCnt] = processedSizes[i];
10407 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10408 jpegSizesCnt += 2;
10409 }
10410 }
10411 return jpegSizesCnt;
10412}
10413
10414/*===========================================================================
10415 * FUNCTION : computeNoiseModelEntryS
10416 *
10417 * DESCRIPTION: function to map a given sensitivity to the S noise
10418 * model parameters in the DNG noise model.
10419 *
10420 * PARAMETERS : sens : the sensor sensitivity
10421 *
10422 * RETURN : S (sensor amplification) noise
10423 *
10424 *==========================================================================*/
10425double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10426 double s = gCamCapability[mCameraId]->gradient_S * sens +
10427 gCamCapability[mCameraId]->offset_S;
10428 return ((s < 0.0) ? 0.0 : s);
10429}
10430
10431/*===========================================================================
10432 * FUNCTION : computeNoiseModelEntryO
10433 *
10434 * DESCRIPTION: function to map a given sensitivity to the O noise
10435 * model parameters in the DNG noise model.
10436 *
10437 * PARAMETERS : sens : the sensor sensitivity
10438 *
10439 * RETURN : O (sensor readout) noise
10440 *
10441 *==========================================================================*/
10442double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10443 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10444 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10445 1.0 : (1.0 * sens / max_analog_sens);
10446 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10447 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10448 return ((o < 0.0) ? 0.0 : o);
10449}
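/* Note on how S and O are consumed: for ANDROID_SENSOR_NOISE_PROFILE the framework models
 * the noise standard deviation of a normalized raw pixel value x as N(x) = sqrt(S * x + O),
 * so S captures the signal-dependent (shot) noise gain and O the signal-independent
 * (readout) noise floor; one (S, O) pair is reported per CFA channel. */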
10450
10451/*===========================================================================
10452 * FUNCTION : getSensorSensitivity
10453 *
10454 * DESCRIPTION: convert iso_mode to an integer value
10455 *
10456 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10457 *
10458 * RETURN : sensitivity supported by sensor
10459 *
10460 *==========================================================================*/
10461int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10462{
10463 int32_t sensitivity;
10464
10465 switch (iso_mode) {
10466 case CAM_ISO_MODE_100:
10467 sensitivity = 100;
10468 break;
10469 case CAM_ISO_MODE_200:
10470 sensitivity = 200;
10471 break;
10472 case CAM_ISO_MODE_400:
10473 sensitivity = 400;
10474 break;
10475 case CAM_ISO_MODE_800:
10476 sensitivity = 800;
10477 break;
10478 case CAM_ISO_MODE_1600:
10479 sensitivity = 1600;
10480 break;
10481 default:
10482 sensitivity = -1;
10483 break;
10484 }
10485 return sensitivity;
10486}
10487
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010488int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10489 if (gHdrPlusClient != nullptr) {
10490 return OK;
10491 }
10492
10493 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10494 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010495 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10496 // to connect to Easel.
10497 bool doNotpowerOnEasel =
10498 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10499
10500 if (doNotpowerOnEasel) {
10501 gHdrPlusClient = nullptr;
10502 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10503 return OK;
10504 }
10505
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010506 // If Easel is present, power on Easel and suspend it immediately.
10507 status_t res = gHdrPlusClient->powerOnEasel();
10508 if (res != OK) {
10509 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10510 gHdrPlusClient = nullptr;
10511 return res;
10512 }
10513
10514 res = gHdrPlusClient->suspendEasel();
10515 if (res != OK) {
10516 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10517 }
10518
10519 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10520 } else {
10521 // Destroy HDR+ client if Easel isn't present.
10522 gHdrPlusClient = nullptr;
10523 }
10524
10525 return OK;
10526}
10527
Thierry Strudel3d639192016-09-09 11:52:26 -070010528/*===========================================================================
10529 * FUNCTION : getCamInfo
10530 *
10531 * DESCRIPTION: query camera capabilities
10532 *
10533 * PARAMETERS :
10534 * @cameraId : camera Id
10535 * @info : camera info struct to be filled in with camera capabilities
10536 *
10537 * RETURN : int type of status
10538 * NO_ERROR -- success
10539 * non-zero failure code
10540 *==========================================================================*/
10541int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10542 struct camera_info *info)
10543{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010544 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010545 int rc = 0;
10546
10547 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010548
10549 rc = initHdrPlusClientLocked();
10550 if (rc != OK) {
10551 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10552 pthread_mutex_unlock(&gCamLock);
10553 return rc;
10554 }
10555
Thierry Strudel3d639192016-09-09 11:52:26 -070010556 if (NULL == gCamCapability[cameraId]) {
10557 rc = initCapabilities(cameraId);
10558 if (rc < 0) {
10559 pthread_mutex_unlock(&gCamLock);
10560 return rc;
10561 }
10562 }
10563
10564 if (NULL == gStaticMetadata[cameraId]) {
10565 rc = initStaticMetadata(cameraId);
10566 if (rc < 0) {
10567 pthread_mutex_unlock(&gCamLock);
10568 return rc;
10569 }
10570 }
10571
10572 switch(gCamCapability[cameraId]->position) {
10573 case CAM_POSITION_BACK:
10574 case CAM_POSITION_BACK_AUX:
10575 info->facing = CAMERA_FACING_BACK;
10576 break;
10577
10578 case CAM_POSITION_FRONT:
10579 case CAM_POSITION_FRONT_AUX:
10580 info->facing = CAMERA_FACING_FRONT;
10581 break;
10582
10583 default:
10584 LOGE("Unknown position type %d for camera id:%d",
10585 gCamCapability[cameraId]->position, cameraId);
10586 rc = -1;
10587 break;
10588 }
10589
10590
10591 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010592#ifndef USE_HAL_3_3
10593 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10594#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010595 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010597 info->static_camera_characteristics = gStaticMetadata[cameraId];
10598
10599 //For now assume both cameras can operate independently.
10600 info->conflicting_devices = NULL;
10601 info->conflicting_devices_length = 0;
10602
10603    //resource cost is 100 * MIN(1.0, m/M),
10604    //where m is the throughput requirement of the maximum stream configuration
10605    //and M is the CPP maximum throughput.
10606 float max_fps = 0.0;
10607 for (uint32_t i = 0;
10608 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10609 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10610 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10611 }
10612 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10613 gCamCapability[cameraId]->active_array_size.width *
10614 gCamCapability[cameraId]->active_array_size.height * max_fps /
10615 gCamCapability[cameraId]->max_pixel_bandwidth;
10616 info->resource_cost = 100 * MIN(1.0, ratio);
10617 LOGI("camera %d resource cost is %d", cameraId,
10618 info->resource_cost);
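    // Worked example with hypothetical numbers (not tied to any specific sensor):
    // a 4000x3000 active array at max_fps = 30 with a CPP bandwidth of 1.2e9 pixels/s
    // gives ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, i.e. resource_cost = 90.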
10619
10620 pthread_mutex_unlock(&gCamLock);
10621 return rc;
10622}
10623
10624/*===========================================================================
10625 * FUNCTION : translateCapabilityToMetadata
10626 *
10627 * DESCRIPTION: translate the capability into camera_metadata_t
10628 *
10629 * PARAMETERS : type of the request
10630 *
10631 *
10632 * RETURN : success: camera_metadata_t*
10633 * failure: NULL
10634 *
10635 *==========================================================================*/
10636camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10637{
10638 if (mDefaultMetadata[type] != NULL) {
10639 return mDefaultMetadata[type];
10640 }
10641 //first time we are handling this request
10642 //fill up the metadata structure using the wrapper class
10643 CameraMetadata settings;
10644 //translate from cam_capability_t to camera_metadata_tag_t
10645 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10646 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10647 int32_t defaultRequestID = 0;
10648 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10649
10650 /* OIS disable */
10651 char ois_prop[PROPERTY_VALUE_MAX];
10652 memset(ois_prop, 0, sizeof(ois_prop));
10653 property_get("persist.camera.ois.disable", ois_prop, "0");
10654 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10655
10656 /* Force video to use OIS */
10657 char videoOisProp[PROPERTY_VALUE_MAX];
10658 memset(videoOisProp, 0, sizeof(videoOisProp));
10659 property_get("persist.camera.ois.video", videoOisProp, "1");
10660 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
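    // Illustrative only: on debug builds these defaults can typically be overridden with
    // e.g. "adb shell setprop persist.camera.ois.disable 1" or
    // "adb shell setprop persist.camera.ois.video 0"; a camera restart may be needed.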
Shuzhen Wang19463d72016-03-08 11:09:52 -080010661
10662 // Hybrid AE enable/disable
10663 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10664 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10665 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10666 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10667
Thierry Strudel3d639192016-09-09 11:52:26 -070010668 uint8_t controlIntent = 0;
10669 uint8_t focusMode;
10670 uint8_t vsMode;
10671 uint8_t optStabMode;
10672 uint8_t cacMode;
10673 uint8_t edge_mode;
10674 uint8_t noise_red_mode;
10675 uint8_t tonemap_mode;
10676 bool highQualityModeEntryAvailable = FALSE;
10677 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010678 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010679 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10680 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010681 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010682
Thierry Strudel3d639192016-09-09 11:52:26 -070010683 switch (type) {
10684 case CAMERA3_TEMPLATE_PREVIEW:
10685 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10686 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10687 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10688 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10689 edge_mode = ANDROID_EDGE_MODE_FAST;
10690 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10691 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10692 break;
10693 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10694 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10695 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10696 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10697 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10698 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10699 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10700 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10701 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10702 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10703 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10704 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10705 highQualityModeEntryAvailable = TRUE;
10706 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10707 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10708 fastModeEntryAvailable = TRUE;
10709 }
10710 }
10711 if (highQualityModeEntryAvailable) {
10712 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10713 } else if (fastModeEntryAvailable) {
10714 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10715 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010716 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10717 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10718 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010719 break;
10720 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10721 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10722 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10723 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010724 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10725 edge_mode = ANDROID_EDGE_MODE_FAST;
10726 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10727 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10728 if (forceVideoOis)
10729 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10730 break;
10731 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10732 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10733 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10734 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010735 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10736 edge_mode = ANDROID_EDGE_MODE_FAST;
10737 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10738 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10739 if (forceVideoOis)
10740 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10741 break;
10742 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10743 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10744 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10745 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10746 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10747 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10748 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10749 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10750 break;
10751 case CAMERA3_TEMPLATE_MANUAL:
10752 edge_mode = ANDROID_EDGE_MODE_FAST;
10753 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10754 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10755 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10756 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10757 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10759 break;
10760 default:
10761 edge_mode = ANDROID_EDGE_MODE_FAST;
10762 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10763 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10764 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10765 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10766 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10767 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10768 break;
10769 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010770    // Set CAC to OFF if the underlying device doesn't support it
10771 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10772 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10773 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010774 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10775 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10776 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10777 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10778 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10779 }
10780 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010781 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010782
10783 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10784 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10785 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10786 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10787 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10788 || ois_disable)
10789 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10790 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010791 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010792
10793 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10794 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10795
10796 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10797 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10798
10799 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10800 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10801
10802 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10803 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10804
10805 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10806 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10807
10808 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10809 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10810
10811 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10812 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10813
10814 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10815 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10816
10817 /*flash*/
10818 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10819 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10820
10821 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10822 settings.update(ANDROID_FLASH_FIRING_POWER,
10823 &flashFiringLevel, 1);
10824
10825 /* lens */
10826 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10827 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10828
10829 if (gCamCapability[mCameraId]->filter_densities_count) {
10830 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10831 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10832 gCamCapability[mCameraId]->filter_densities_count);
10833 }
10834
10835 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10836 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10837
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10839 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10840
10841 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10842 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10843
10844 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10845 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10846
10847 /* face detection (default to OFF) */
10848 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10849 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10850
Thierry Strudel54dc9782017-02-15 12:12:10 -080010851 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10852 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010853
10854 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10855 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10856
10857 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10858 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10859
Thierry Strudel3d639192016-09-09 11:52:26 -070010860
10861 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10862 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10863
10864 /* Exposure time(Update the Min Exposure Time)*/
10865 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10866 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10867
10868 /* frame duration */
10869 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10870 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10871
10872 /* sensitivity */
10873 static const int32_t default_sensitivity = 100;
10874 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010875#ifndef USE_HAL_3_3
10876 static const int32_t default_isp_sensitivity =
10877 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10878 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10879#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010880
10881 /*edge mode*/
10882 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10883
10884 /*noise reduction mode*/
10885 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10886
10887 /*color correction mode*/
10888 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10889 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10890
10891 /*transform matrix mode*/
10892 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10893
10894 int32_t scaler_crop_region[4];
10895 scaler_crop_region[0] = 0;
10896 scaler_crop_region[1] = 0;
10897 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10898 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10899 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10900
10901 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10902 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10903
10904 /*focus distance*/
10905 float focus_distance = 0.0;
10906 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10907
10908 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010909 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 float max_range = 0.0;
10911 float max_fixed_fps = 0.0;
10912 int32_t fps_range[2] = {0, 0};
10913 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10914 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010915 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10916 TEMPLATE_MAX_PREVIEW_FPS) {
10917 continue;
10918 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010919 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10920 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10921 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10922 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10923 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10924 if (range > max_range) {
10925 fps_range[0] =
10926 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10927 fps_range[1] =
10928 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10929 max_range = range;
10930 }
10931 } else {
10932 if (range < 0.01 && max_fixed_fps <
10933 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10934 fps_range[0] =
10935 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10936 fps_range[1] =
10937 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10938 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10939 }
10940 }
10941 }
10942 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
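    // For example (hypothetical fps table): with ranges [15,30], [30,30] and [7,30], the
    // preview/still/ZSL templates pick the widest range whose max_fps <= TEMPLATE_MAX_PREVIEW_FPS
    // ([7,30]), while the video templates pick the highest fixed range ([30,30]).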
10943
10944 /*precapture trigger*/
10945 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10946 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10947
10948 /*af trigger*/
10949 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10950 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10951
10952 /* ae & af regions */
10953 int32_t active_region[] = {
10954 gCamCapability[mCameraId]->active_array_size.left,
10955 gCamCapability[mCameraId]->active_array_size.top,
10956 gCamCapability[mCameraId]->active_array_size.left +
10957 gCamCapability[mCameraId]->active_array_size.width,
10958 gCamCapability[mCameraId]->active_array_size.top +
10959 gCamCapability[mCameraId]->active_array_size.height,
10960 0};
10961 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10962 sizeof(active_region) / sizeof(active_region[0]));
10963 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10964 sizeof(active_region) / sizeof(active_region[0]));
10965
10966 /* black level lock */
10967 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10968 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10969
Thierry Strudel3d639192016-09-09 11:52:26 -070010970 //special defaults for manual template
10971 if (type == CAMERA3_TEMPLATE_MANUAL) {
10972 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10973 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10974
10975 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10976 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10977
10978 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10979 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10980
10981 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10982 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10983
10984 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10985 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10986
10987 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10988 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10989 }
10990
10991
10992 /* TNR
10993     * This is where we decide for which templates TNR will be enabled.
10994     * TNR is enabled if either the preview or the video stream requires it.
10995     * This is not to be confused with per-stream linking; that decision is
10996     * still made per session and is handled as part of stream configuration.
10997 */
10998 uint8_t tnr_enable = 0;
10999
11000 if (m_bTnrPreview || m_bTnrVideo) {
11001
11002 switch (type) {
11003 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11004 tnr_enable = 1;
11005 break;
11006
11007 default:
11008 tnr_enable = 0;
11009 break;
11010 }
11011
11012 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11013 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11014 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11015
11016 LOGD("TNR:%d with process plate %d for template:%d",
11017 tnr_enable, tnr_process_type, type);
11018 }
11019
11020 //Update Link tags to default
11021 int32_t sync_type = CAM_TYPE_STANDALONE;
11022 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11023
11024 int32_t is_main = 0; //this doesn't matter as app should overwrite
11025 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11026
11027 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11028
11029 /* CDS default */
11030 char prop[PROPERTY_VALUE_MAX];
11031 memset(prop, 0, sizeof(prop));
11032 property_get("persist.camera.CDS", prop, "Auto");
11033 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11034 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11035 if (CAM_CDS_MODE_MAX == cds_mode) {
11036 cds_mode = CAM_CDS_MODE_AUTO;
11037 }
11038
11039    /* Disable CDS in templates which have TNR enabled */
11040 if (tnr_enable)
11041 cds_mode = CAM_CDS_MODE_OFF;
11042
11043 int32_t mode = cds_mode;
11044 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
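    // Illustrative only: assuming CDS_MAP contains "Off"/"On"/"Auto" entries, setting
    // "adb shell setprop persist.camera.CDS Off" would make lookupProp() resolve to
    // CAM_CDS_MODE_OFF here; unmatched strings fall back to CAM_CDS_MODE_AUTO via the
    // CAM_CDS_MODE_MAX check above.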
Thierry Strudel04e026f2016-10-10 11:27:36 -070011045
Thierry Strudel269c81a2016-10-12 12:13:59 -070011046 /* Manual Convergence AEC Speed is disabled by default*/
11047 float default_aec_speed = 0;
11048 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11049
11050 /* Manual Convergence AWB Speed is disabled by default*/
11051 float default_awb_speed = 0;
11052 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11053
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011054 // Set instant AEC to normal convergence by default
11055 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11056 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11057
Shuzhen Wang19463d72016-03-08 11:09:52 -080011058 /* hybrid ae */
11059 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11060
Thierry Strudel3d639192016-09-09 11:52:26 -070011061 mDefaultMetadata[type] = settings.release();
11062
11063 return mDefaultMetadata[type];
11064}
11065
11066/*===========================================================================
11067 * FUNCTION : setFrameParameters
11068 *
11069 * DESCRIPTION: set parameters per frame as requested in the metadata from
11070 * framework
11071 *
11072 * PARAMETERS :
11073 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011074 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 * @blob_request: Whether this request is a blob request or not
11076 *
11077 * RETURN : success: NO_ERROR
11078 * failure:
11079 *==========================================================================*/
11080int QCamera3HardwareInterface::setFrameParameters(
11081 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011082 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011083 int blob_request,
11084 uint32_t snapshotStreamId)
11085{
11086 /*translate from camera_metadata_t type to parm_type_t*/
11087 int rc = 0;
11088 int32_t hal_version = CAM_HAL_V3;
11089
11090 clear_metadata_buffer(mParameters);
11091 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11092 LOGE("Failed to set hal version in the parameters");
11093 return BAD_VALUE;
11094 }
11095
11096 /*we need to update the frame number in the parameters*/
11097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11098 request->frame_number)) {
11099 LOGE("Failed to set the frame number in the parameters");
11100 return BAD_VALUE;
11101 }
11102
11103 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011105 LOGE("Failed to set stream type mask in the parameters");
11106 return BAD_VALUE;
11107 }
11108
11109 if (mUpdateDebugLevel) {
11110 uint32_t dummyDebugLevel = 0;
11111         /* The value of dummyDebugLevel is irrelevant; on
11112          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11114 dummyDebugLevel)) {
11115 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11116 return BAD_VALUE;
11117 }
11118 mUpdateDebugLevel = false;
11119 }
11120
11121 if(request->settings != NULL){
11122 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11123 if (blob_request)
11124 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11125 }
11126
11127 return rc;
11128}
11129
11130/*===========================================================================
11131 * FUNCTION : setReprocParameters
11132 *
11133 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11134 * return it.
11135 *
11136 * PARAMETERS :
11137 * @request : request that needs to be serviced
11138 *
11139 * RETURN : success: NO_ERROR
11140 * failure:
11141 *==========================================================================*/
11142int32_t QCamera3HardwareInterface::setReprocParameters(
11143 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11144 uint32_t snapshotStreamId)
11145{
11146 /*translate from camera_metadata_t type to parm_type_t*/
11147 int rc = 0;
11148
11149 if (NULL == request->settings){
11150 LOGE("Reprocess settings cannot be NULL");
11151 return BAD_VALUE;
11152 }
11153
11154 if (NULL == reprocParam) {
11155 LOGE("Invalid reprocessing metadata buffer");
11156 return BAD_VALUE;
11157 }
11158 clear_metadata_buffer(reprocParam);
11159
11160 /*we need to update the frame number in the parameters*/
11161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11162 request->frame_number)) {
11163 LOGE("Failed to set the frame number in the parameters");
11164 return BAD_VALUE;
11165 }
11166
11167 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11168 if (rc < 0) {
11169 LOGE("Failed to translate reproc request");
11170 return rc;
11171 }
11172
11173 CameraMetadata frame_settings;
11174 frame_settings = request->settings;
11175 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11176 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11177 int32_t *crop_count =
11178 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11179 int32_t *crop_data =
11180 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11181 int32_t *roi_map =
11182 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11183 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11184 cam_crop_data_t crop_meta;
11185 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11186 crop_meta.num_of_streams = 1;
11187 crop_meta.crop_info[0].crop.left = crop_data[0];
11188 crop_meta.crop_info[0].crop.top = crop_data[1];
11189 crop_meta.crop_info[0].crop.width = crop_data[2];
11190 crop_meta.crop_info[0].crop.height = crop_data[3];
11191
11192 crop_meta.crop_info[0].roi_map.left =
11193 roi_map[0];
11194 crop_meta.crop_info[0].roi_map.top =
11195 roi_map[1];
11196 crop_meta.crop_info[0].roi_map.width =
11197 roi_map[2];
11198 crop_meta.crop_info[0].roi_map.height =
11199 roi_map[3];
11200
11201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11202 rc = BAD_VALUE;
11203 }
11204 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11205 request->input_buffer->stream,
11206 crop_meta.crop_info[0].crop.left,
11207 crop_meta.crop_info[0].crop.top,
11208 crop_meta.crop_info[0].crop.width,
11209 crop_meta.crop_info[0].crop.height);
11210 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11211 request->input_buffer->stream,
11212 crop_meta.crop_info[0].roi_map.left,
11213 crop_meta.crop_info[0].roi_map.top,
11214 crop_meta.crop_info[0].roi_map.width,
11215 crop_meta.crop_info[0].roi_map.height);
11216 } else {
11217 LOGE("Invalid reprocess crop count %d!", *crop_count);
11218 }
11219 } else {
11220 LOGE("No crop data from matching output stream");
11221 }
11222
11223     /* These settings are not needed for regular requests, so handle them specially for
11224        reprocess requests; they carry information needed for EXIF tags */
11225 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11226 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11227 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11228 if (NAME_NOT_FOUND != val) {
11229 uint32_t flashMode = (uint32_t)val;
11230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11231 rc = BAD_VALUE;
11232 }
11233 } else {
11234 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11235 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11236 }
11237 } else {
11238 LOGH("No flash mode in reprocess settings");
11239 }
11240
11241 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11242 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11243 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11244 rc = BAD_VALUE;
11245 }
11246 } else {
11247 LOGH("No flash state in reprocess settings");
11248 }
11249
11250 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11251 uint8_t *reprocessFlags =
11252 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11254 *reprocessFlags)) {
11255 rc = BAD_VALUE;
11256 }
11257 }
11258
Thierry Strudel54dc9782017-02-15 12:12:10 -080011259 // Add exif debug data to internal metadata
11260 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11261 mm_jpeg_debug_exif_params_t *debug_params =
11262 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11263 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11264 // AE
11265 if (debug_params->ae_debug_params_valid == TRUE) {
11266 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11267 debug_params->ae_debug_params);
11268 }
11269 // AWB
11270 if (debug_params->awb_debug_params_valid == TRUE) {
11271 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11272 debug_params->awb_debug_params);
11273 }
11274 // AF
11275 if (debug_params->af_debug_params_valid == TRUE) {
11276 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11277 debug_params->af_debug_params);
11278 }
11279 // ASD
11280 if (debug_params->asd_debug_params_valid == TRUE) {
11281 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11282 debug_params->asd_debug_params);
11283 }
11284 // Stats
11285 if (debug_params->stats_debug_params_valid == TRUE) {
11286 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11287 debug_params->stats_debug_params);
11288 }
11289 // BE Stats
11290 if (debug_params->bestats_debug_params_valid == TRUE) {
11291 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11292 debug_params->bestats_debug_params);
11293 }
11294 // BHIST
11295 if (debug_params->bhist_debug_params_valid == TRUE) {
11296 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11297 debug_params->bhist_debug_params);
11298 }
11299 // 3A Tuning
11300 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11301 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11302 debug_params->q3a_tuning_debug_params);
11303 }
11304 }
11305
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011306 // Add metadata which reprocess needs
11307 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11308 cam_reprocess_info_t *repro_info =
11309 (cam_reprocess_info_t *)frame_settings.find
11310 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011311 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011312 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011313 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011314 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011315 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011316 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011317 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011318 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011319 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011320 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011321 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011322 repro_info->pipeline_flip);
11323 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11324 repro_info->af_roi);
11325 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11326 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011327        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11328           the CAM_INTF_PARM_ROTATION metadata has already been added in
11329           translateToHalMetadata, and HAL needs to keep this new rotation
11330           metadata. Otherwise, the old rotation info saved in the vendor tag
11331           is used. */
11332 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11333 CAM_INTF_PARM_ROTATION, reprocParam) {
11334 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11335 } else {
11336 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011337 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011338 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 }
11340
11341     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11342        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11343        roi.width and roi.height will be the final JPEG size.
11344        For now, HAL only checks this for reprocess requests. */
11345 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11346 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11347 uint8_t *enable =
11348 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11349 if (*enable == TRUE) {
11350 int32_t *crop_data =
11351 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11352 cam_stream_crop_info_t crop_meta;
11353 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11354 crop_meta.stream_id = 0;
11355 crop_meta.crop.left = crop_data[0];
11356 crop_meta.crop.top = crop_data[1];
11357 crop_meta.crop.width = crop_data[2];
11358 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011359 // The JPEG crop roi should match cpp output size
11360 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11361 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11362 crop_meta.roi_map.left = 0;
11363 crop_meta.roi_map.top = 0;
11364 crop_meta.roi_map.width = cpp_crop->crop.width;
11365 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011366 }
11367 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11368 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011369 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011370 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011371 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11372 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011373 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011374 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11375
11376 // Add JPEG scale information
11377 cam_dimension_t scale_dim;
11378 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11379 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11380 int32_t *roi =
11381 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11382 scale_dim.width = roi[2];
11383 scale_dim.height = roi[3];
11384 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11385 scale_dim);
11386 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11387 scale_dim.width, scale_dim.height, mCameraId);
11388 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 }
11390 }
11391
11392 return rc;
11393}
11394
11395/*===========================================================================
11396 * FUNCTION : saveRequestSettings
11397 *
11398 * DESCRIPTION: Add any settings that might have changed to the request settings
11399 * and save the settings to be applied on the frame
11400 *
11401 * PARAMETERS :
11402 * @jpegMetadata : the extracted and/or modified jpeg metadata
11403 * @request : request with initial settings
11404 *
11405 * RETURN :
11406 * camera_metadata_t* : pointer to the saved request settings
11407 *==========================================================================*/
11408camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11409 const CameraMetadata &jpegMetadata,
11410 camera3_capture_request_t *request)
11411{
11412 camera_metadata_t *resultMetadata;
11413 CameraMetadata camMetadata;
11414 camMetadata = request->settings;
11415
11416 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11417 int32_t thumbnail_size[2];
11418 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11419 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11420 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11421 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11422 }
11423
11424 if (request->input_buffer != NULL) {
11425 uint8_t reprocessFlags = 1;
11426 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11427 (uint8_t*)&reprocessFlags,
11428 sizeof(reprocessFlags));
11429 }
11430
11431 resultMetadata = camMetadata.release();
11432 return resultMetadata;
11433}
11434
11435/*===========================================================================
11436 * FUNCTION : setHalFpsRange
11437 *
11438 * DESCRIPTION: set FPS range parameter
11439 *
11440 *
11441 * PARAMETERS :
11442 * @settings : Metadata from framework
11443 * @hal_metadata: Metadata buffer
11444 *
11445 *
11446 * RETURN : success: NO_ERROR
11447 * failure:
11448 *==========================================================================*/
11449int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11450 metadata_buffer_t *hal_metadata)
11451{
11452 int32_t rc = NO_ERROR;
11453 cam_fps_range_t fps_range;
11454 fps_range.min_fps = (float)
11455 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11456 fps_range.max_fps = (float)
11457 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11458 fps_range.video_min_fps = fps_range.min_fps;
11459 fps_range.video_max_fps = fps_range.max_fps;
11460
11461 LOGD("aeTargetFpsRange fps: [%f %f]",
11462 fps_range.min_fps, fps_range.max_fps);
11463 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11464 * follows:
11465 * ---------------------------------------------------------------|
11466 * Video stream is absent in configure_streams |
11467     *  (Camcorder preview before the first video record)             |
11468 * ---------------------------------------------------------------|
11469 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11470 * | | | vid_min/max_fps|
11471 * ---------------------------------------------------------------|
11472 * NO | [ 30, 240] | 240 | [240, 240] |
11473 * |-------------|-------------|----------------|
11474 * | [240, 240] | 240 | [240, 240] |
11475 * ---------------------------------------------------------------|
11476 * Video stream is present in configure_streams |
11477 * ---------------------------------------------------------------|
11478 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11479 * | | | vid_min/max_fps|
11480 * ---------------------------------------------------------------|
11481 * NO | [ 30, 240] | 240 | [240, 240] |
11482 * (camcorder prev |-------------|-------------|----------------|
11483 * after video rec | [240, 240] | 240 | [240, 240] |
11484 * is stopped) | | | |
11485 * ---------------------------------------------------------------|
11486 * YES | [ 30, 240] | 240 | [240, 240] |
11487 * |-------------|-------------|----------------|
11488 * | [240, 240] | 240 | [240, 240] |
11489 * ---------------------------------------------------------------|
11490 * When Video stream is absent in configure_streams,
11491 * preview fps = sensor_fps / batchsize
11492 * Eg: for 240fps at batchSize 4, preview = 60fps
11493 * for 120fps at batchSize 4, preview = 30fps
11494 *
11495 * When video stream is present in configure_streams, preview fps is as per
11496 * the ratio of preview buffers to video buffers requested in process
11497 * capture request
11498 */
11499 mBatchSize = 0;
11500 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11501 fps_range.min_fps = fps_range.video_max_fps;
11502 fps_range.video_min_fps = fps_range.video_max_fps;
11503 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11504 fps_range.max_fps);
11505 if (NAME_NOT_FOUND != val) {
11506 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11508 return BAD_VALUE;
11509 }
11510
11511 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11512 /* If batchmode is currently in progress and the fps changes,
11513 * set the flag to restart the sensor */
11514 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11515 (mHFRVideoFps != fps_range.max_fps)) {
11516 mNeedSensorRestart = true;
11517 }
11518 mHFRVideoFps = fps_range.max_fps;
11519 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11520 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11521 mBatchSize = MAX_HFR_BATCH_SIZE;
11522 }
11523 }
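                // Example (assuming PREVIEW_FPS_FOR_HFR is 30 fps): a [240,240] request gives
                // mBatchSize = 240 / 30 = 8, subject to the MAX_HFR_BATCH_SIZE clamp above,
                // matching the "preview fps = sensor_fps / batchsize" note in setHalFpsRange.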
11524 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11525
11526 }
11527 } else {
11528         /* HFR mode is a session param in the backend/ISP. It should be reset
11529          * when in non-HFR mode */
11530 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11532 return BAD_VALUE;
11533 }
11534 }
11535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11536 return BAD_VALUE;
11537 }
11538 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11539 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11540 return rc;
11541}
11542
11543/*===========================================================================
11544 * FUNCTION : translateToHalMetadata
11545 *
11546 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11547 *
11548 *
11549 * PARAMETERS :
11550 * @request : request sent from framework
11551 *
11552 *
11553 * RETURN : success: NO_ERROR
11554 * failure:
11555 *==========================================================================*/
11556int QCamera3HardwareInterface::translateToHalMetadata
11557 (const camera3_capture_request_t *request,
11558 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011559 uint32_t snapshotStreamId) {
11560 if (request == nullptr || hal_metadata == nullptr) {
11561 return BAD_VALUE;
11562 }
11563
11564 int64_t minFrameDuration = getMinFrameDuration(request);
11565
11566 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11567 minFrameDuration);
11568}
11569
11570int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11571 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11572 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11573
Thierry Strudel3d639192016-09-09 11:52:26 -070011574 int rc = 0;
11575 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011576 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011577
11578 /* Do not change the order of the following list unless you know what you are
11579 * doing.
11580 * The order is laid out in such a way that parameters in the front of the table
11581 * may be used to override the parameters later in the table. Examples are:
11582 * 1. META_MODE should precede AEC/AWB/AF MODE
11583     *  2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11584 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11585     *  4. Any mode should precede its corresponding settings
11586 */
11587 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11588 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11590 rc = BAD_VALUE;
11591 }
11592 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11593 if (rc != NO_ERROR) {
11594 LOGE("extractSceneMode failed");
11595 }
11596 }
11597
11598 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11599 uint8_t fwk_aeMode =
11600 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11601 uint8_t aeMode;
11602 int32_t redeye;
11603
11604 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11605 aeMode = CAM_AE_MODE_OFF;
11606 } else {
11607 aeMode = CAM_AE_MODE_ON;
11608 }
11609 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11610 redeye = 1;
11611 } else {
11612 redeye = 0;
11613 }
11614
11615 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11616 fwk_aeMode);
11617 if (NAME_NOT_FOUND != val) {
11618 int32_t flashMode = (int32_t)val;
11619 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11620 }
11621
11622 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11624 rc = BAD_VALUE;
11625 }
11626 }
11627
11628 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11629 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11630 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11631 fwk_whiteLevel);
11632 if (NAME_NOT_FOUND != val) {
11633 uint8_t whiteLevel = (uint8_t)val;
11634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11635 rc = BAD_VALUE;
11636 }
11637 }
11638 }
11639
11640 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11641 uint8_t fwk_cacMode =
11642 frame_settings.find(
11643 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11644 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11645 fwk_cacMode);
11646 if (NAME_NOT_FOUND != val) {
11647 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11648 bool entryAvailable = FALSE;
11649 // Check whether Frameworks set CAC mode is supported in device or not
11650 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11651 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11652 entryAvailable = TRUE;
11653 break;
11654 }
11655 }
11656 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11657             // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.:
11658             // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH_QUALITY behaving the same as FAST in the ISP
11659             // No HW ISP CAC + only SW CAC : advertise all 3, with FAST behaving the same as OFF
11660 if (entryAvailable == FALSE) {
11661 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11662 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11663 } else {
11664 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11665                     // HIGH_QUALITY is not supported, so set FAST; the spec says the underlying
11666                     // device implementation can be the same for both modes.
11667 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11668 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11669                     // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11670                     // to avoid the fps drop that high quality would cause.
11671 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11672 } else {
11673 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11674 }
11675 }
11676 }
11677 LOGD("Final cacMode is %d", cacMode);
11678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11679 rc = BAD_VALUE;
11680 }
11681 } else {
11682 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11683 }
11684 }
11685
Thierry Strudel2896d122017-02-23 19:18:03 -080011686 char af_value[PROPERTY_VALUE_MAX];
11687 property_get("persist.camera.af.infinity", af_value, "0");
11688
Jason Lee84ae9972017-02-24 13:24:24 -080011689 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011690 if (atoi(af_value) == 0) {
11691 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011692 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011693 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11694 fwk_focusMode);
11695 if (NAME_NOT_FOUND != val) {
11696 uint8_t focusMode = (uint8_t)val;
11697 LOGD("set focus mode %d", focusMode);
11698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11699 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11700 rc = BAD_VALUE;
11701 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011702 }
11703 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011704 } else {
11705 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11706 LOGE("Focus forced to infinity %d", focusMode);
11707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11708 rc = BAD_VALUE;
11709 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011710 }
11711
Jason Lee84ae9972017-02-24 13:24:24 -080011712 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11713 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011714 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11716 focalDistance)) {
11717 rc = BAD_VALUE;
11718 }
11719 }
11720
11721 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11722 uint8_t fwk_antibandingMode =
11723 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11724 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11725 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11726 if (NAME_NOT_FOUND != val) {
11727 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011728 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11729 if (m60HzZone) {
11730 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11731 } else {
11732 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11733 }
11734 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11736 hal_antibandingMode)) {
11737 rc = BAD_VALUE;
11738 }
11739 }
11740 }
11741
11742 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11743 int32_t expCompensation = frame_settings.find(
11744 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11745 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11746 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11747 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11748 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011749 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11751 expCompensation)) {
11752 rc = BAD_VALUE;
11753 }
11754 }
11755
11756 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11757 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11759 rc = BAD_VALUE;
11760 }
11761 }
11762 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11763 rc = setHalFpsRange(frame_settings, hal_metadata);
11764 if (rc != NO_ERROR) {
11765 LOGE("setHalFpsRange failed");
11766 }
11767 }
11768
11769 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11770 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11772 rc = BAD_VALUE;
11773 }
11774 }
11775
11776 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11777 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11778 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11779 fwk_effectMode);
11780 if (NAME_NOT_FOUND != val) {
11781 uint8_t effectMode = (uint8_t)val;
11782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11783 rc = BAD_VALUE;
11784 }
11785 }
11786 }
11787
11788 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11789 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11791 colorCorrectMode)) {
11792 rc = BAD_VALUE;
11793 }
11794 }
11795
11796 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11797 cam_color_correct_gains_t colorCorrectGains;
11798 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11799 colorCorrectGains.gains[i] =
11800 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11801 }
11802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11803 colorCorrectGains)) {
11804 rc = BAD_VALUE;
11805 }
11806 }
11807
11808 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11809 cam_color_correct_matrix_t colorCorrectTransform;
11810 cam_rational_type_t transform_elem;
11811 size_t num = 0;
11812 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11813 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11814 transform_elem.numerator =
11815 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11816 transform_elem.denominator =
11817 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11818 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11819 num++;
11820 }
11821 }
11822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11823 colorCorrectTransform)) {
11824 rc = BAD_VALUE;
11825 }
11826 }
11827
11828 cam_trigger_t aecTrigger;
11829 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11830 aecTrigger.trigger_id = -1;
11831 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11832 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11833 aecTrigger.trigger =
11834 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11835 aecTrigger.trigger_id =
11836 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11838 aecTrigger)) {
11839 rc = BAD_VALUE;
11840 }
11841 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11842 aecTrigger.trigger, aecTrigger.trigger_id);
11843 }
11844
11845 /*af_trigger must come with a trigger id*/
11846 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11847 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11848 cam_trigger_t af_trigger;
11849 af_trigger.trigger =
11850 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11851 af_trigger.trigger_id =
11852 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11854 rc = BAD_VALUE;
11855 }
11856 LOGD("AfTrigger: %d AfTriggerID: %d",
11857 af_trigger.trigger, af_trigger.trigger_id);
11858 }
11859
11860 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11861 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11863 rc = BAD_VALUE;
11864 }
11865 }
11866 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11867 cam_edge_application_t edge_application;
11868 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011869
Thierry Strudel3d639192016-09-09 11:52:26 -070011870 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11871 edge_application.sharpness = 0;
11872 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011873 edge_application.sharpness =
11874 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11875 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11876 int32_t sharpness =
11877 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11878 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11879 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11880 LOGD("Setting edge mode sharpness %d", sharpness);
11881 edge_application.sharpness = sharpness;
11882 }
11883 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011884 }
11885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11886 rc = BAD_VALUE;
11887 }
11888 }
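    /* Editor's sketch (comment only): the QCAMERA3_SHARPNESS_STRENGTH vendor tag
     * only takes effect when it falls inside the advertised sharpness_ctrl range;
     * with hypothetical values a request might carry:
     *
     *   uint8_t edgeMode  = ANDROID_EDGE_MODE_HIGH_QUALITY;
     *   int32_t sharpness = 28;   // hypothetical; must lie within min/max
     *   settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
     *   settings.update(QCAMERA3_SHARPNESS_STRENGTH, &sharpness, 1);
     *
     * Out-of-range values silently keep the capability default set above. */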
11889
11890 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11891 int32_t respectFlashMode = 1;
11892 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11893 uint8_t fwk_aeMode =
11894 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11895 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11896 respectFlashMode = 0;
11897 LOGH("AE Mode controls flash, ignore android.flash.mode");
11898 }
11899 }
11900 if (respectFlashMode) {
11901 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11902 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11903 LOGH("flash mode after mapping %d", val);
11904 // To check: CAM_INTF_META_FLASH_MODE usage
11905 if (NAME_NOT_FOUND != val) {
11906 uint8_t flashMode = (uint8_t)val;
11907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11908 rc = BAD_VALUE;
11909 }
11910 }
11911 }
11912 }
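    /* Editor's sketch (comment only): android.flash.mode is honored only when AE
     * is OFF or plain ON; any AE mode above plain ON (the auto/always-flash
     * modes) owns the LED and the explicit flash mode is dropped. To force
     * torch, a caller would therefore set:
     *
     *   uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
     *   uint8_t flash  = ANDROID_FLASH_MODE_TORCH;
     *   settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
     *   settings.update(ANDROID_FLASH_MODE, &flash, 1);
     */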
11913
11914 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11915 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11917 rc = BAD_VALUE;
11918 }
11919 }
11920
11921 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11922 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11924 flashFiringTime)) {
11925 rc = BAD_VALUE;
11926 }
11927 }
11928
11929 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11930 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11932 hotPixelMode)) {
11933 rc = BAD_VALUE;
11934 }
11935 }
11936
11937 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11938 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11940 lensAperture)) {
11941 rc = BAD_VALUE;
11942 }
11943 }
11944
11945 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11946 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11948 filterDensity)) {
11949 rc = BAD_VALUE;
11950 }
11951 }
11952
11953 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11954 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11955 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11956 focalLength)) {
11957 rc = BAD_VALUE;
11958 }
11959 }
11960
11961 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11962 uint8_t optStabMode =
11963 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11965 optStabMode)) {
11966 rc = BAD_VALUE;
11967 }
11968 }
11969
11970 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11971 uint8_t videoStabMode =
11972 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11973 LOGD("videoStabMode from APP = %d", videoStabMode);
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11975 videoStabMode)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979
11980
11981 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11982 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11984 noiseRedMode)) {
11985 rc = BAD_VALUE;
11986 }
11987 }
11988
11989 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11990 float reprocessEffectiveExposureFactor =
11991 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11993 reprocessEffectiveExposureFactor)) {
11994 rc = BAD_VALUE;
11995 }
11996 }
11997
11998 cam_crop_region_t scalerCropRegion;
11999 bool scalerCropSet = false;
12000 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12001 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12002 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12003 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12004 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12005
12006 // Map coordinate system from active array to sensor output.
12007 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12008 scalerCropRegion.width, scalerCropRegion.height);
12009
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12011 scalerCropRegion)) {
12012 rc = BAD_VALUE;
12013 }
12014 scalerCropSet = true;
12015 }
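    /* Editor's worked example (hypothetical numbers): with a 4000x3000 active
     * array feeding a 2000x1500 sensor output, an app crop of
     * {left=1000, top=750, width=2000, height=1500} in active-array coordinates
     * becomes roughly {500, 375, 1000, 750} after toSensor(), ignoring any
     * alignment adjustments the mapper may apply. */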
12016
12017 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12018 int64_t sensorExpTime =
12019 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12020 LOGD("setting sensorExpTime %lld", sensorExpTime);
12021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12022 sensorExpTime)) {
12023 rc = BAD_VALUE;
12024 }
12025 }
12026
12027 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12028 int64_t sensorFrameDuration =
12029 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012030 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12031 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12032 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12033 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12035 sensorFrameDuration)) {
12036 rc = BAD_VALUE;
12037 }
12038 }
12039
12040 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12041 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12042 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12043 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12044 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12045 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12046 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12048 sensorSensitivity)) {
12049 rc = BAD_VALUE;
12050 }
12051 }
12052
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012053#ifndef USE_HAL_3_3
12054 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12055 int32_t ispSensitivity =
12056 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12057 if (ispSensitivity <
12058 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12059 ispSensitivity =
12060 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12061 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12062 }
12063 if (ispSensitivity >
12064 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12065 ispSensitivity =
12066 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12067 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12068 }
12069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12070 ispSensitivity)) {
12071 rc = BAD_VALUE;
12072 }
12073 }
12074#endif
12075
Thierry Strudel3d639192016-09-09 11:52:26 -070012076 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12077 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12079 rc = BAD_VALUE;
12080 }
12081 }
12082
12083 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12084 uint8_t fwk_facedetectMode =
12085 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12086
12087 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12088 fwk_facedetectMode);
12089
12090 if (NAME_NOT_FOUND != val) {
12091 uint8_t facedetectMode = (uint8_t)val;
12092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12093 facedetectMode)) {
12094 rc = BAD_VALUE;
12095 }
12096 }
12097 }
12098
Thierry Strudel54dc9782017-02-15 12:12:10 -080012099 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012100 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012101 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12103 histogramMode)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107
12108 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12109 uint8_t sharpnessMapMode =
12110 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12112 sharpnessMapMode)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116
12117 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12118 uint8_t tonemapMode =
12119 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12121 rc = BAD_VALUE;
12122 }
12123 }
12124 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12125 /*All tonemap channels will have the same number of points*/
12126 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12127 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12128 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12129 cam_rgb_tonemap_curves tonemapCurves;
12130 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12131 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12132 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12133 tonemapCurves.tonemap_points_cnt,
12134 CAM_MAX_TONEMAP_CURVE_SIZE);
12135 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12136 }
12137
12138 /* ch0 = G*/
12139 size_t point = 0;
12140 cam_tonemap_curve_t tonemapCurveGreen;
12141 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12142 for (size_t j = 0; j < 2; j++) {
12143 tonemapCurveGreen.tonemap_points[i][j] =
12144 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12145 point++;
12146 }
12147 }
12148 tonemapCurves.curves[0] = tonemapCurveGreen;
12149
12150 /* ch 1 = B */
12151 point = 0;
12152 cam_tonemap_curve_t tonemapCurveBlue;
12153 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12154 for (size_t j = 0; j < 2; j++) {
12155 tonemapCurveBlue.tonemap_points[i][j] =
12156 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12157 point++;
12158 }
12159 }
12160 tonemapCurves.curves[1] = tonemapCurveBlue;
12161
12162 /* ch 2 = R */
12163 point = 0;
12164 cam_tonemap_curve_t tonemapCurveRed;
12165 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12166 for (size_t j = 0; j < 2; j++) {
12167 tonemapCurveRed.tonemap_points[i][j] =
12168 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12169 point++;
12170 }
12171 }
12172 tonemapCurves.curves[2] = tonemapCurveRed;
12173
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12175 tonemapCurves)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
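    /* Editor's sketch (comment only): each framework curve is a flat array of
     * (Pin, Pout) pairs, hence tonemap_points_cnt = count / 2 above. A minimal
     * two-point linear curve per channel would be supplied as:
     *
     *   float linear[] = {0.0f, 0.0f, 1.0f, 1.0f};
     *   settings.update(ANDROID_TONEMAP_CURVE_RED,   linear, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_GREEN, linear, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_BLUE,  linear, 4);
     */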
12179
12180 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12181 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12183 captureIntent)) {
12184 rc = BAD_VALUE;
12185 }
12186 }
12187
12188 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12189 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12191 blackLevelLock)) {
12192 rc = BAD_VALUE;
12193 }
12194 }
12195
12196 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12197 uint8_t lensShadingMapMode =
12198 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12200 lensShadingMapMode)) {
12201 rc = BAD_VALUE;
12202 }
12203 }
12204
12205 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12206 cam_area_t roi;
12207 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012208 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012209
12210 // Map coordinate system from active array to sensor output.
12211 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12212 roi.rect.height);
12213
12214 if (scalerCropSet) {
12215 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12216 }
12217 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12218 rc = BAD_VALUE;
12219 }
12220 }
12221
12222 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12223 cam_area_t roi;
12224 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012225 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012226
12227 // Map coordinate system from active array to sensor output.
12228 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12229 roi.rect.height);
12230
12231 if (scalerCropSet) {
12232 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12233 }
12234 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12235 rc = BAD_VALUE;
12236 }
12237 }
12238
12239 // CDS for non-HFR non-video mode
12240 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12241 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12242 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12243 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12244 LOGE("Invalid CDS mode %d!", *fwk_cds);
12245 } else {
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12247 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12248 rc = BAD_VALUE;
12249 }
12250 }
12251 }
12252
Thierry Strudel04e026f2016-10-10 11:27:36 -070012253 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012254 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012255 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012256 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12257 }
12258 if (m_bVideoHdrEnabled)
12259 vhdr = CAM_VIDEO_HDR_MODE_ON;
12260
Thierry Strudel54dc9782017-02-15 12:12:10 -080012261 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12262
12263 if(vhdr != curr_hdr_state)
12264 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12265
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012266 rc = setVideoHdrMode(mParameters, vhdr);
12267 if (rc != NO_ERROR) {
12268 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012269 }
12270
12271 //IR
12272 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12273 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12274 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012275 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12276 uint8_t isIRon = 0;
12277
12278            isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012279 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12280 LOGE("Invalid IR mode %d!", fwk_ir);
12281 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012282 if(isIRon != curr_ir_state )
12283 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12284
Thierry Strudel04e026f2016-10-10 11:27:36 -070012285 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12286 CAM_INTF_META_IR_MODE, fwk_ir)) {
12287 rc = BAD_VALUE;
12288 }
12289 }
12290 }
12291
Thierry Strudel54dc9782017-02-15 12:12:10 -080012292 //Binning Correction Mode
12293 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12294 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12295 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12296 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12297 || (0 > fwk_binning_correction)) {
12298 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12299 } else {
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12301 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305 }
12306
Thierry Strudel269c81a2016-10-12 12:13:59 -070012307 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12308 float aec_speed;
12309 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12310 LOGD("AEC Speed :%f", aec_speed);
12311 if ( aec_speed < 0 ) {
12312 LOGE("Invalid AEC mode %f!", aec_speed);
12313 } else {
12314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12315 aec_speed)) {
12316 rc = BAD_VALUE;
12317 }
12318 }
12319 }
12320
12321 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12322 float awb_speed;
12323 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12324 LOGD("AWB Speed :%f", awb_speed);
12325 if ( awb_speed < 0 ) {
12326 LOGE("Invalid AWB mode %f!", awb_speed);
12327 } else {
12328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12329 awb_speed)) {
12330 rc = BAD_VALUE;
12331 }
12332 }
12333 }
12334
Thierry Strudel3d639192016-09-09 11:52:26 -070012335 // TNR
12336 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12337 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12338 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012339 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012340 cam_denoise_param_t tnr;
12341 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12342 tnr.process_plates =
12343 (cam_denoise_process_type_t)frame_settings.find(
12344 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12345 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012346
12347 if(b_TnrRequested != curr_tnr_state)
12348 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12349
Thierry Strudel3d639192016-09-09 11:52:26 -070012350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
Thierry Strudel54dc9782017-02-15 12:12:10 -080012355 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012356 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012357 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12359 *exposure_metering_mode)) {
12360 rc = BAD_VALUE;
12361 }
12362 }
12363
Thierry Strudel3d639192016-09-09 11:52:26 -070012364 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12365 int32_t fwk_testPatternMode =
12366 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12367 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12368 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12369
12370 if (NAME_NOT_FOUND != testPatternMode) {
12371 cam_test_pattern_data_t testPatternData;
12372 memset(&testPatternData, 0, sizeof(testPatternData));
12373 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12374 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12375 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12376 int32_t *fwk_testPatternData =
12377 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12378 testPatternData.r = fwk_testPatternData[0];
12379 testPatternData.b = fwk_testPatternData[3];
12380 switch (gCamCapability[mCameraId]->color_arrangement) {
12381 case CAM_FILTER_ARRANGEMENT_RGGB:
12382 case CAM_FILTER_ARRANGEMENT_GRBG:
12383 testPatternData.gr = fwk_testPatternData[1];
12384 testPatternData.gb = fwk_testPatternData[2];
12385 break;
12386 case CAM_FILTER_ARRANGEMENT_GBRG:
12387 case CAM_FILTER_ARRANGEMENT_BGGR:
12388 testPatternData.gr = fwk_testPatternData[2];
12389 testPatternData.gb = fwk_testPatternData[1];
12390 break;
12391 default:
12392 LOGE("color arrangement %d is not supported",
12393 gCamCapability[mCameraId]->color_arrangement);
12394 break;
12395 }
12396 }
12397 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12398 testPatternData)) {
12399 rc = BAD_VALUE;
12400 }
12401 } else {
12402 LOGE("Invalid framework sensor test pattern mode %d",
12403 fwk_testPatternMode);
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12408 size_t count = 0;
12409 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12410 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12411 gps_coords.data.d, gps_coords.count, count);
12412 if (gps_coords.count != count) {
12413 rc = BAD_VALUE;
12414 }
12415 }
12416
12417 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12418 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12419 size_t count = 0;
12420 const char *gps_methods_src = (const char *)
12421 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12422 memset(gps_methods, '\0', sizeof(gps_methods));
12423 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12424 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12425 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12426 if (GPS_PROCESSING_METHOD_SIZE != count) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430
12431 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12432 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12434 gps_timestamp)) {
12435 rc = BAD_VALUE;
12436 }
12437 }
12438
12439 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12440 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12441 cam_rotation_info_t rotation_info;
12442 if (orientation == 0) {
12443 rotation_info.rotation = ROTATE_0;
12444 } else if (orientation == 90) {
12445 rotation_info.rotation = ROTATE_90;
12446 } else if (orientation == 180) {
12447 rotation_info.rotation = ROTATE_180;
12448 } else if (orientation == 270) {
12449 rotation_info.rotation = ROTATE_270;
12450 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012451 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012452 rotation_info.streamId = snapshotStreamId;
12453 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
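    /* Editor's sketch (comment only): a request such as
     *
     *   int32_t orientation = 90;
     *   settings.update(ANDROID_JPEG_ORIENTATION, &orientation, 1);
     *
     * maps to ROTATE_90 above. Note that the if/else chain has no final else, so
     * a value other than 0/90/180/270 leaves rotation_info.rotation unset before
     * it is sent to the backend. */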
12458
12459 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12460 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12462 rc = BAD_VALUE;
12463 }
12464 }
12465
12466 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12467 uint32_t thumb_quality = (uint32_t)
12468 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12470 thumb_quality)) {
12471 rc = BAD_VALUE;
12472 }
12473 }
12474
12475 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12476 cam_dimension_t dim;
12477 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12478 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12479 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12480 rc = BAD_VALUE;
12481 }
12482 }
12483
12484 // Internal metadata
12485 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12486 size_t count = 0;
12487 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12488 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12489 privatedata.data.i32, privatedata.count, count);
12490 if (privatedata.count != count) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012495 // ISO/Exposure Priority
12496 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12497 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12498 cam_priority_mode_t mode =
12499 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12500 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12501 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12502 use_iso_exp_pty.previewOnly = FALSE;
12503 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12504 use_iso_exp_pty.value = *ptr;
12505
12506 if(CAM_ISO_PRIORITY == mode) {
12507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12508 use_iso_exp_pty)) {
12509 rc = BAD_VALUE;
12510 }
12511 }
12512 else {
12513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12514 use_iso_exp_pty)) {
12515 rc = BAD_VALUE;
12516 }
12517 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012518
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523 } else {
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12525 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012526 }
12527 }
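    /* Editor's sketch (comment only, ISO value hypothetical): ISO priority is
     * requested through the two vendor tags handled above, e.g.
     *
     *   int32_t priority = (int32_t)CAM_ISO_PRIORITY;  // selects the ISO path
     *   int64_t iso      = 800;                        // hypothetical ISO value
     *   settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
     *   settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &iso, 1);
     *
     * which also forces ZSL mode on; requests without both tags set
     * CAM_INTF_PARM_ZSL_MODE back to 0. */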
12528
12529 // Saturation
12530 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12531 int32_t* use_saturation =
12532 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12533 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12534 rc = BAD_VALUE;
12535 }
12536 }
12537
Thierry Strudel3d639192016-09-09 11:52:26 -070012538 // EV step
12539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12540 gCamCapability[mCameraId]->exp_compensation_step)) {
12541 rc = BAD_VALUE;
12542 }
12543
12544 // CDS info
12545 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12546 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12547 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12548
12549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12550 CAM_INTF_META_CDS_DATA, *cdsData)) {
12551 rc = BAD_VALUE;
12552 }
12553 }
12554
Shuzhen Wang19463d72016-03-08 11:09:52 -080012555 // Hybrid AE
12556 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12557 uint8_t *hybrid_ae = (uint8_t *)
12558 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12559
12560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12561 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12562 rc = BAD_VALUE;
12563 }
12564 }
12565
Shuzhen Wang14415f52016-11-16 18:26:18 -080012566 // Histogram
12567 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12568 uint8_t histogramMode =
12569 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12571 histogramMode)) {
12572 rc = BAD_VALUE;
12573 }
12574 }
12575
12576 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12577 int32_t histogramBins =
12578 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12579 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12580 histogramBins)) {
12581 rc = BAD_VALUE;
12582 }
12583 }
12584
Thierry Strudel3d639192016-09-09 11:52:26 -070012585 return rc;
12586}
12587
12588/*===========================================================================
12589 * FUNCTION : captureResultCb
12590 *
12591 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12592 *
12593 * PARAMETERS :
12594 * @frame : frame information from mm-camera-interface
12595 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12596 * @userdata: userdata
12597 *
12598 * RETURN : NONE
12599 *==========================================================================*/
12600void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12601 camera3_stream_buffer_t *buffer,
12602 uint32_t frame_number, bool isInputBuffer, void *userdata)
12603{
12604 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12605 if (hw == NULL) {
12606 LOGE("Invalid hw %p", hw);
12607 return;
12608 }
12609
12610 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12611 return;
12612}
12613
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012614/*===========================================================================
12615 * FUNCTION : setBufferErrorStatus
12616 *
12617 * DESCRIPTION: Callback handler for channels to report any buffer errors
12618 *
12619 * PARAMETERS :
12620 * @ch : Channel on which buffer error is reported from
12621 * @frame_number : frame number on which buffer error is reported on
12622 * @buffer_status : buffer error status
12623 * @userdata: userdata
12624 *
12625 * RETURN : NONE
12626 *==========================================================================*/
12627void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12628 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12629{
12630 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12631 if (hw == NULL) {
12632 LOGE("Invalid hw %p", hw);
12633 return;
12634 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012635
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012636 hw->setBufferErrorStatus(ch, frame_number, err);
12637 return;
12638}
12639
12640void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12641 uint32_t frameNumber, camera3_buffer_status_t err)
12642{
12643 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12644 pthread_mutex_lock(&mMutex);
12645
12646 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12647 if (req.frame_number != frameNumber)
12648 continue;
12649 for (auto& k : req.mPendingBufferList) {
12650 if(k.stream->priv == ch) {
12651 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12652 }
12653 }
12654 }
12655
12656 pthread_mutex_unlock(&mMutex);
12657 return;
12658}
Thierry Strudel3d639192016-09-09 11:52:26 -070012659/*===========================================================================
12660 * FUNCTION : initialize
12661 *
12662 * DESCRIPTION: Pass framework callback pointers to HAL
12663 *
12664 * PARAMETERS :
12665 *
12666 *
12667 * RETURN : Success : 0
12668 * Failure: -ENODEV
12669 *==========================================================================*/
12670
12671int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12672 const camera3_callback_ops_t *callback_ops)
12673{
12674 LOGD("E");
12675 QCamera3HardwareInterface *hw =
12676 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12677 if (!hw) {
12678 LOGE("NULL camera device");
12679 return -ENODEV;
12680 }
12681
12682 int rc = hw->initialize(callback_ops);
12683 LOGD("X");
12684 return rc;
12685}
12686
12687/*===========================================================================
12688 * FUNCTION : configure_streams
12689 *
12690 * DESCRIPTION: Entry point for stream configuration; forwards the framework's
12690 *              stream list to configureStreams() on the HAL instance
12691 *
12692 * PARAMETERS :
12693 *
12694 *
12695 * RETURN : Success: 0
12696 * Failure: -EINVAL (if stream configuration is invalid)
12697 * -ENODEV (fatal error)
12698 *==========================================================================*/
12699
12700int QCamera3HardwareInterface::configure_streams(
12701 const struct camera3_device *device,
12702 camera3_stream_configuration_t *stream_list)
12703{
12704 LOGD("E");
12705 QCamera3HardwareInterface *hw =
12706 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12707 if (!hw) {
12708 LOGE("NULL camera device");
12709 return -ENODEV;
12710 }
12711 int rc = hw->configureStreams(stream_list);
12712 LOGD("X");
12713 return rc;
12714}
12715
12716/*===========================================================================
12717 * FUNCTION : construct_default_request_settings
12718 *
12719 * DESCRIPTION: Configure a settings buffer to meet the required use case
12720 *
12721 * PARAMETERS :
12722 *
12723 *
12724 * RETURN : Success: Return valid metadata
12725 * Failure: Return NULL
12726 *==========================================================================*/
12727const camera_metadata_t* QCamera3HardwareInterface::
12728 construct_default_request_settings(const struct camera3_device *device,
12729 int type)
12730{
12731
12732 LOGD("E");
12733 camera_metadata_t* fwk_metadata = NULL;
12734 QCamera3HardwareInterface *hw =
12735 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12736 if (!hw) {
12737 LOGE("NULL camera device");
12738 return NULL;
12739 }
12740
12741 fwk_metadata = hw->translateCapabilityToMetadata(type);
12742
12743 LOGD("X");
12744 return fwk_metadata;
12745}
12746
12747/*===========================================================================
12748 * FUNCTION : process_capture_request
12749 *
12750 * DESCRIPTION: Entry point for a capture request; routes the framework request
12750 *              to orchestrateRequest() on the HAL instance
12751 *
12752 * PARAMETERS :
12753 *
12754 *
12755 * RETURN :
12756 *==========================================================================*/
12757int QCamera3HardwareInterface::process_capture_request(
12758 const struct camera3_device *device,
12759 camera3_capture_request_t *request)
12760{
12761 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012762 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012763 QCamera3HardwareInterface *hw =
12764 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12765 if (!hw) {
12766 LOGE("NULL camera device");
12767 return -EINVAL;
12768 }
12769
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012770 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012771 LOGD("X");
12772 return rc;
12773}
12774
12775/*===========================================================================
12776 * FUNCTION : dump
12777 *
12778 * DESCRIPTION: Dump HAL state to the given file descriptor; also re-reads the
12778 *              log level properties
12779 *
12780 * PARAMETERS :
12781 *
12782 *
12783 * RETURN :
12784 *==========================================================================*/
12785
12786void QCamera3HardwareInterface::dump(
12787 const struct camera3_device *device, int fd)
12788{
12789 /* Log level property is read when "adb shell dumpsys media.camera" is
12790 called so that the log level can be controlled without restarting
12791 the media server */
12792 getLogLevel();
12793
12794 LOGD("E");
12795 QCamera3HardwareInterface *hw =
12796 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12797 if (!hw) {
12798 LOGE("NULL camera device");
12799 return;
12800 }
12801
12802 hw->dump(fd);
12803 LOGD("X");
12804 return;
12805}
12806
12807/*===========================================================================
12808 * FUNCTION : flush
12809 *
12810 * DESCRIPTION: Flush all in-flight captures; only valid while the device is
12810 *              STARTED, and restarts the channels afterwards
12811 *
12812 * PARAMETERS :
12813 *
12814 *
12815 * RETURN :
12816 *==========================================================================*/
12817
12818int QCamera3HardwareInterface::flush(
12819 const struct camera3_device *device)
12820{
12821 int rc;
12822 LOGD("E");
12823 QCamera3HardwareInterface *hw =
12824 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12825 if (!hw) {
12826 LOGE("NULL camera device");
12827 return -EINVAL;
12828 }
12829
12830 pthread_mutex_lock(&hw->mMutex);
12831 // Validate current state
12832 switch (hw->mState) {
12833 case STARTED:
12834 /* valid state */
12835 break;
12836
12837 case ERROR:
12838 pthread_mutex_unlock(&hw->mMutex);
12839 hw->handleCameraDeviceError();
12840 return -ENODEV;
12841
12842 default:
12843 LOGI("Flush returned during state %d", hw->mState);
12844 pthread_mutex_unlock(&hw->mMutex);
12845 return 0;
12846 }
12847 pthread_mutex_unlock(&hw->mMutex);
12848
12849 rc = hw->flush(true /* restart channels */ );
12850 LOGD("X");
12851 return rc;
12852}
12853
12854/*===========================================================================
12855 * FUNCTION : close_camera_device
12856 *
12857 * DESCRIPTION: Close the camera device and free the HAL instance
12858 *
12859 * PARAMETERS :
12860 *
12861 *
12862 * RETURN :
12863 *==========================================================================*/
12864int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12865{
12866 int ret = NO_ERROR;
12867 QCamera3HardwareInterface *hw =
12868 reinterpret_cast<QCamera3HardwareInterface *>(
12869 reinterpret_cast<camera3_device_t *>(device)->priv);
12870 if (!hw) {
12871 LOGE("NULL camera device");
12872 return BAD_VALUE;
12873 }
12874
12875 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12876 delete hw;
12877 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012878 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012879 return ret;
12880}
12881
12882/*===========================================================================
12883 * FUNCTION : getWaveletDenoiseProcessPlate
12884 *
12885 * DESCRIPTION: query wavelet denoise process plate
12886 *
12887 * PARAMETERS : None
12888 *
12889 * RETURN : WNR process plate value
12890 *==========================================================================*/
12891cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12892{
12893 char prop[PROPERTY_VALUE_MAX];
12894 memset(prop, 0, sizeof(prop));
12895 property_get("persist.denoise.process.plates", prop, "0");
12896 int processPlate = atoi(prop);
12897 switch(processPlate) {
12898 case 0:
12899 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12900 case 1:
12901 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12902 case 2:
12903 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12904 case 3:
12905 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12906 default:
12907 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12908 }
12909}
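/* Editor's example: the plate selection is runtime-tunable through the
 * persist.denoise.process.plates property, which is read on every query, e.g.
 *
 *   adb shell setprop persist.denoise.process.plates 1   # CBCR_ONLY
 *
 * Any value outside 0-3 falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. */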
12910
12911
12912/*===========================================================================
12913 * FUNCTION : getTemporalDenoiseProcessPlate
12914 *
12915 * DESCRIPTION: query temporal denoise process plate
12916 *
12917 * PARAMETERS : None
12918 *
12919 * RETURN : TNR process plate value
12920 *==========================================================================*/
12921cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12922{
12923 char prop[PROPERTY_VALUE_MAX];
12924 memset(prop, 0, sizeof(prop));
12925 property_get("persist.tnr.process.plates", prop, "0");
12926 int processPlate = atoi(prop);
12927 switch(processPlate) {
12928 case 0:
12929 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12930 case 1:
12931 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12932 case 2:
12933 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12934 case 3:
12935 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12936 default:
12937 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12938 }
12939}
12940
12941
12942/*===========================================================================
12943 * FUNCTION : extractSceneMode
12944 *
12945 * DESCRIPTION: Extract scene mode from frameworks set metadata
12946 *
12947 * PARAMETERS :
12948 * @frame_settings: CameraMetadata reference
12949 * @metaMode: ANDROID_CONTROL_MODE
12950 * @hal_metadata: hal metadata structure
12951 *
12952 * RETURN : int32_t type of status (NO_ERROR on success)
12953 *==========================================================================*/
12954int32_t QCamera3HardwareInterface::extractSceneMode(
12955 const CameraMetadata &frame_settings, uint8_t metaMode,
12956 metadata_buffer_t *hal_metadata)
12957{
12958 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012959 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12960
12961 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12962 LOGD("Ignoring control mode OFF_KEEP_STATE");
12963 return NO_ERROR;
12964 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012965
12966 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12967 camera_metadata_ro_entry entry =
12968 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12969 if (0 == entry.count)
12970 return rc;
12971
12972 uint8_t fwk_sceneMode = entry.data.u8[0];
12973
12974 int val = lookupHalName(SCENE_MODES_MAP,
12975 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12976 fwk_sceneMode);
12977 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012978 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012979 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012980 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012981 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012982
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012983 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12984 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12985 }
12986
12987 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12988 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012989 cam_hdr_param_t hdr_params;
12990 hdr_params.hdr_enable = 1;
12991 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12992 hdr_params.hdr_need_1x = false;
12993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12994 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12995 rc = BAD_VALUE;
12996 }
12997 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012998
Thierry Strudel3d639192016-09-09 11:52:26 -070012999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13000 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13001 rc = BAD_VALUE;
13002 }
13003 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013004
13005 if (mForceHdrSnapshot) {
13006 cam_hdr_param_t hdr_params;
13007 hdr_params.hdr_enable = 1;
13008 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13009 hdr_params.hdr_need_1x = false;
13010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13011 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13012 rc = BAD_VALUE;
13013 }
13014 }
13015
Thierry Strudel3d639192016-09-09 11:52:26 -070013016 return rc;
13017}
13018
13019/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013020 * FUNCTION : setVideoHdrMode
13021 *
13022 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13023 *
13024 * PARAMETERS :
13025 * @hal_metadata: hal metadata structure
13026 * @vhdr: requested video HDR mode (value of QCAMERA3_VIDEO_HDR_MODE)
13027 *
13028 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on an invalid mode)
13029 *==========================================================================*/
13030int32_t QCamera3HardwareInterface::setVideoHdrMode(
13031 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13032{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013033 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13034 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13035 }
13036
13037 LOGE("Invalid Video HDR mode %d!", vhdr);
13038 return BAD_VALUE;
13039}
13040
13041/*===========================================================================
13042 * FUNCTION : setSensorHDR
13043 *
13044 * DESCRIPTION: Enable/disable sensor HDR.
13045 *
13046 * PARAMETERS :
13047 * @hal_metadata: hal metadata structure
13048 * @enable: boolean whether to enable/disable sensor HDR
13049 *
13050 * RETURN : int32_t type of status (NO_ERROR on success)
13051 *==========================================================================*/
13052int32_t QCamera3HardwareInterface::setSensorHDR(
13053 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13054{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013055 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013056 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13057
13058 if (enable) {
13059 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13060 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13061 #ifdef _LE_CAMERA_
13062 //Default to staggered HDR for IOT
13063 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13064 #else
13065 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13066 #endif
13067 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13068 }
13069
13070 bool isSupported = false;
13071 switch (sensor_hdr) {
13072 case CAM_SENSOR_HDR_IN_SENSOR:
13073 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13074 CAM_QCOM_FEATURE_SENSOR_HDR) {
13075 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013076 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013077 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013078 break;
13079 case CAM_SENSOR_HDR_ZIGZAG:
13080 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13081 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13082 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013083 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013084 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013085 break;
13086 case CAM_SENSOR_HDR_STAGGERED:
13087 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13088 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13089 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013090 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013091 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013092 break;
13093 case CAM_SENSOR_HDR_OFF:
13094 isSupported = true;
13095 LOGD("Turning off sensor HDR");
13096 break;
13097 default:
13098 LOGE("HDR mode %d not supported", sensor_hdr);
13099 rc = BAD_VALUE;
13100 break;
13101 }
13102
13103 if(isSupported) {
13104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13105 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13106 rc = BAD_VALUE;
13107 } else {
13108 if(!isVideoHdrEnable)
13109 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013110 }
13111 }
13112 return rc;
13113}
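/* Editor's sketch: when enable is true, the HDR flavour comes from the
 * persist.camera.sensor.hdr property, cast directly to cam_sensor_hdr_type_t.
 * From the defaults above, "3" (the _LE_CAMERA_ default) selects
 * CAM_SENSOR_HDR_STAGGERED, and the non-LE default of "0" is expected to land
 * on CAM_SENSOR_HDR_OFF (assumed to be enum value 0), e.g.
 *
 *   adb shell setprop persist.camera.sensor.hdr 3
 *
 * The mode is only programmed when the matching bit is present in
 * qcom_supported_feature_mask. */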
13114
13115/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013116 * FUNCTION : needRotationReprocess
13117 *
13118 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13119 *
13120 * PARAMETERS : none
13121 *
13122 * RETURN : true: needed
13123 * false: no need
13124 *==========================================================================*/
13125bool QCamera3HardwareInterface::needRotationReprocess()
13126{
13127 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13128 // current rotation is not zero, and pp has the capability to process rotation
13128        // pp has the capability to process rotation, so offload rotation to reprocess
13130 return true;
13131 }
13132
13133 return false;
13134}
13135
13136/*===========================================================================
13137 * FUNCTION : needReprocess
13138 *
13139 * DESCRIPTION: if reprocess in needed
13140 *
13141 * PARAMETERS : none
13142 *
13143 * RETURN : true: needed
13144 * false: no need
13145 *==========================================================================*/
13146bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13147{
13148 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13149 // TODO: add for ZSL HDR later
13150 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13151 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13152 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13153 return true;
13154 } else {
13155 LOGH("already post processed frame");
13156 return false;
13157 }
13158 }
13159 return needRotationReprocess();
13160}
13161
13162/*===========================================================================
13163 * FUNCTION : needJpegExifRotation
13164 *
13165 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13166 *
13167 * PARAMETERS : none
13168 *
13169 * RETURN : true: needed
13170 * false: no need
13171 *==========================================================================*/
13172bool QCamera3HardwareInterface::needJpegExifRotation()
13173{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013174 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013175 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13176 LOGD("Need use Jpeg EXIF Rotation");
13177 return true;
13178 }
13179 return false;
13180}
13181
13182/*===========================================================================
13183 * FUNCTION : addOfflineReprocChannel
13184 *
13185 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13186 * coming from input channel
13187 *
13188 * PARAMETERS :
13189 * @config : reprocess configuration
13190 * @inputChHandle : pointer to the input (source) channel
13191 *
13192 *
13193 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13194 *==========================================================================*/
13195QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13196 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13197{
13198 int32_t rc = NO_ERROR;
13199 QCamera3ReprocessChannel *pChannel = NULL;
13200
13201 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013202 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13203 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013204 if (NULL == pChannel) {
13205 LOGE("no mem for reprocess channel");
13206 return NULL;
13207 }
13208
13209 rc = pChannel->initialize(IS_TYPE_NONE);
13210 if (rc != NO_ERROR) {
13211 LOGE("init reprocess channel failed, ret = %d", rc);
13212 delete pChannel;
13213 return NULL;
13214 }
13215
13216 // pp feature config
13217 cam_pp_feature_config_t pp_config;
13218 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13219
13220 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13221 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13222 & CAM_QCOM_FEATURE_DSDN) {
13223        //Use CPP CDS in case h/w supports it.
13224 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13225 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13226 }
13227 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13228 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13229 }
13230
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013231 if (config.hdr_param.hdr_enable) {
13232 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13233 pp_config.hdr_param = config.hdr_param;
13234 }
13235
13236 if (mForceHdrSnapshot) {
13237 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13238 pp_config.hdr_param.hdr_enable = 1;
13239 pp_config.hdr_param.hdr_need_1x = 0;
13240 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13241 }
13242
Thierry Strudel3d639192016-09-09 11:52:26 -070013243 rc = pChannel->addReprocStreamsFromSource(pp_config,
13244 config,
13245 IS_TYPE_NONE,
13246 mMetadataChannel);
13247
13248 if (rc != NO_ERROR) {
13249 delete pChannel;
13250 return NULL;
13251 }
13252 return pChannel;
13253}
13254
13255/*===========================================================================
13256 * FUNCTION : getMobicatMask
13257 *
13258 * DESCRIPTION: returns mobicat mask
13259 *
13260 * PARAMETERS : none
13261 *
13262 * RETURN : mobicat mask
13263 *
13264 *==========================================================================*/
13265uint8_t QCamera3HardwareInterface::getMobicatMask()
13266{
13267 return m_MobicatMask;
13268}
13269
13270/*===========================================================================
13271 * FUNCTION : setMobicat
13272 *
13273 * DESCRIPTION: set Mobicat on/off.
13274 *
13275 * PARAMETERS :
13276 * @params : none
13277 *
13278 * RETURN : int32_t type of status
13279 * NO_ERROR -- success
13280 * none-zero failure code
13281 *==========================================================================*/
13282int32_t QCamera3HardwareInterface::setMobicat()
13283{
13284 char value [PROPERTY_VALUE_MAX];
13285 property_get("persist.camera.mobicat", value, "0");
13286 int32_t ret = NO_ERROR;
13287 uint8_t enableMobi = (uint8_t)atoi(value);
13288
13289 if (enableMobi) {
13290 tune_cmd_t tune_cmd;
13291 tune_cmd.type = SET_RELOAD_CHROMATIX;
13292 tune_cmd.module = MODULE_ALL;
13293 tune_cmd.value = TRUE;
13294 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13295 CAM_INTF_PARM_SET_VFE_COMMAND,
13296 tune_cmd);
13297
13298 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13299 CAM_INTF_PARM_SET_PP_COMMAND,
13300 tune_cmd);
13301 }
13302 m_MobicatMask = enableMobi;
13303
13304 return ret;
13305}
13306
13307/*===========================================================================
13308* FUNCTION : getLogLevel
13309*
13310* DESCRIPTION: Reads the log level property into a variable
13311*
13312* PARAMETERS :
13313* None
13314*
13315* RETURN :
13316* None
13317*==========================================================================*/
13318void QCamera3HardwareInterface::getLogLevel()
13319{
13320 char prop[PROPERTY_VALUE_MAX];
13321 uint32_t globalLogLevel = 0;
13322
13323 property_get("persist.camera.hal.debug", prop, "0");
13324 int val = atoi(prop);
13325 if (0 <= val) {
13326 gCamHal3LogLevel = (uint32_t)val;
13327 }
13328
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013329 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013330 gKpiDebugLevel = atoi(prop);
13331
13332 property_get("persist.camera.global.debug", prop, "0");
13333 val = atoi(prop);
13334 if (0 <= val) {
13335 globalLogLevel = (uint32_t)val;
13336 }
13337
13338 /* Highest log level among hal.logs and global.logs is selected */
13339 if (gCamHal3LogLevel < globalLogLevel)
13340 gCamHal3LogLevel = globalLogLevel;
13341
13342 return;
13343}
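
// Illustrative example (assumed property values, not from the source):
// with persist.camera.hal.debug=2 and persist.camera.global.debug=4 the
// effective gCamHal3LogLevel becomes 4, because the higher of the two
// values is selected. persist.camera.kpi.debug is read independently
// into gKpiDebugLevel.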
13344
13345/*===========================================================================
13346 * FUNCTION : validateStreamRotations
13347 *
13348 * DESCRIPTION: Check if the rotations requested are supported
13349 *
13350 * PARAMETERS :
13351 * @stream_list : streams to be configured
13352 *
13353 * RETURN : NO_ERROR on success
13354 * -EINVAL on failure
13355 *
13356 *==========================================================================*/
13357int QCamera3HardwareInterface::validateStreamRotations(
13358 camera3_stream_configuration_t *streamList)
13359{
13360 int rc = NO_ERROR;
13361
13362 /*
13363 * Loop through all streams requested in configuration
13364 * Check if unsupported rotations have been requested on any of them
13365 */
13366 for (size_t j = 0; j < streamList->num_streams; j++){
13367 camera3_stream_t *newStream = streamList->streams[j];
13368
13369 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13370 bool isImplDef = (newStream->format ==
13371 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13372 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13373 isImplDef);
13374
13375 if (isRotated && (!isImplDef || isZsl)) {
13376 LOGE("Error: Unsupported rotation of %d requested for stream"
13377 "type:%d and stream format:%d",
13378 newStream->rotation, newStream->stream_type,
13379 newStream->format);
13380 rc = -EINVAL;
13381 break;
13382 }
13383 }
13384
13385 return rc;
13386}
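
// Illustrative example of the rule above (hypothetical configuration):
// a CAMERA3_STREAM_ROTATION_90 request on an IMPLEMENTATION_DEFINED
// output stream passes this check, while the same rotation on a BLOB
// (JPEG) stream or on a bidirectional ZSL stream is rejected with
// -EINVAL.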
13387
13388/*===========================================================================
13389* FUNCTION : getFlashInfo
13390*
13391* DESCRIPTION: Retrieve information about whether the device has a flash.
13392*
13393* PARAMETERS :
13394* @cameraId : Camera id to query
13395* @hasFlash : Boolean indicating whether there is a flash device
13396* associated with given camera
13397* @flashNode : If a flash device exists, this will be its device node.
13398*
13399* RETURN :
13400* None
13401*==========================================================================*/
13402void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13403 bool& hasFlash,
13404 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13405{
13406 cam_capability_t* camCapability = gCamCapability[cameraId];
13407 if (NULL == camCapability) {
13408 hasFlash = false;
13409 flashNode[0] = '\0';
13410 } else {
13411 hasFlash = camCapability->flash_available;
13412 strlcpy(flashNode,
13413 (char*)camCapability->flash_dev_name,
13414 QCAMERA_MAX_FILEPATH_LENGTH);
13415 }
13416}
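
// Minimal usage sketch (illustrative only, hypothetical caller):
//   bool hasFlash = false;
//   char flashNode[QCAMERA_MAX_FILEPATH_LENGTH] = {0};
//   getFlashInfo(cameraId, hasFlash, flashNode);
//   // hasFlash stays false and flashNode stays "" when no capability
//   // entry exists for cameraId.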
13417
13418/*===========================================================================
13419* FUNCTION : getEepromVersionInfo
13420*
13421* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13422*
13423* PARAMETERS : None
13424*
13425* RETURN : string describing EEPROM version
13426* "\0" if no such info available
13427*==========================================================================*/
13428const char *QCamera3HardwareInterface::getEepromVersionInfo()
13429{
13430 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13431}
13432
13433/*===========================================================================
13434* FUNCTION : getLdafCalib
13435*
13436* DESCRIPTION: Retrieve Laser AF calibration data
13437*
13438* PARAMETERS : None
13439*
13440 * RETURN : Pointer to two uint32_t values holding the Laser AF
13441 * calibration data, NULL if none is available.
13442*==========================================================================*/
13443const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13444{
13445 if (mLdafCalibExist) {
13446 return &mLdafCalib[0];
13447 } else {
13448 return NULL;
13449 }
13450}
13451
13452/*===========================================================================
13453 * FUNCTION : dynamicUpdateMetaStreamInfo
13454 *
13455 * DESCRIPTION: This function:
13456 * (1) stops all the channels
13457 * (2) returns error on pending requests and buffers
13458 * (3) sends metastream_info in setparams
13459 * (4) starts all channels
13460 * This is useful when the sensor has to be restarted to apply
13461 * settings such as the frame rate of a different sensor mode.
13462 *
13463 * PARAMETERS : None
13464 *
13465 * RETURN : NO_ERROR on success
13466 * Error codes on failure
13467 *
13468 *==========================================================================*/
13469int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13470{
13471 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
13472 int rc = NO_ERROR;
13473
13474 LOGD("E");
13475
13476 rc = stopAllChannels();
13477 if (rc < 0) {
13478 LOGE("stopAllChannels failed");
13479 return rc;
13480 }
13481
13482 rc = notifyErrorForPendingRequests();
13483 if (rc < 0) {
13484 LOGE("notifyErrorForPendingRequests failed");
13485 return rc;
13486 }
13487
13488 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13489 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13490 " Format:%d",
13491 mStreamConfigInfo.type[i],
13492 mStreamConfigInfo.stream_sizes[i].width,
13493 mStreamConfigInfo.stream_sizes[i].height,
13494 mStreamConfigInfo.postprocess_mask[i],
13495 mStreamConfigInfo.format[i]);
13496 }
13497
13498 /* Send meta stream info once again so that ISP can start */
13499 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13500 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13501 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13502 mParameters);
13503 if (rc < 0) {
13504 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13505 }
13506
13507 rc = startAllChannels();
13508 if (rc < 0) {
13509 LOGE("startAllChannels failed");
13510 return rc;
13511 }
13512
13513 LOGD("X");
13514 return rc;
13515}
13516
13517/*===========================================================================
13518 * FUNCTION : stopAllChannels
13519 *
13520 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13521 *
13522 * PARAMETERS : None
13523 *
13524 * RETURN : NO_ERROR on success
13525 * Error codes on failure
13526 *
13527 *==========================================================================*/
13528int32_t QCamera3HardwareInterface::stopAllChannels()
13529{
13530 int32_t rc = NO_ERROR;
13531
13532 LOGD("Stopping all channels");
13533 // Stop the Streams/Channels
13534 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13535 it != mStreamInfo.end(); it++) {
13536 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13537 if (channel) {
13538 channel->stop();
13539 }
13540 (*it)->status = INVALID;
13541 }
13542
13543 if (mSupportChannel) {
13544 mSupportChannel->stop();
13545 }
13546 if (mAnalysisChannel) {
13547 mAnalysisChannel->stop();
13548 }
13549 if (mRawDumpChannel) {
13550 mRawDumpChannel->stop();
13551 }
13552 if (mHdrPlusRawSrcChannel) {
13553 mHdrPlusRawSrcChannel->stop();
13554 }
13555 if (mMetadataChannel) {
13556 /* If mStreamInfo is not empty, there is a metadata stream */
13557 mMetadataChannel->stop();
13558 }
13559
13560 LOGD("All channels stopped");
13561 return rc;
13562}
13563
13564/*===========================================================================
13565 * FUNCTION : startAllChannels
13566 *
13567 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13568 *
13569 * PARAMETERS : None
13570 *
13571 * RETURN : NO_ERROR on success
13572 * Error codes on failure
13573 *
13574 *==========================================================================*/
13575int32_t QCamera3HardwareInterface::startAllChannels()
13576{
13577 int32_t rc = NO_ERROR;
13578
13579 LOGD("Start all channels ");
13580 // Start the Streams/Channels
13581 if (mMetadataChannel) {
13582 /* If mStreamInfo is not empty, there is a metadata stream */
13583 rc = mMetadataChannel->start();
13584 if (rc < 0) {
13585 LOGE("META channel start failed");
13586 return rc;
13587 }
13588 }
13589 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13590 it != mStreamInfo.end(); it++) {
13591 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13592 if (channel) {
13593 rc = channel->start();
13594 if (rc < 0) {
13595 LOGE("channel start failed");
13596 return rc;
13597 }
13598 }
13599 }
13600 if (mAnalysisChannel) {
13601 mAnalysisChannel->start();
13602 }
13603 if (mSupportChannel) {
13604 rc = mSupportChannel->start();
13605 if (rc < 0) {
13606 LOGE("Support channel start failed");
13607 return rc;
13608 }
13609 }
13610 if (mRawDumpChannel) {
13611 rc = mRawDumpChannel->start();
13612 if (rc < 0) {
13613 LOGE("RAW dump channel start failed");
13614 return rc;
13615 }
13616 }
13617 if (mHdrPlusRawSrcChannel) {
13618 rc = mHdrPlusRawSrcChannel->start();
13619 if (rc < 0) {
13620 LOGE("HDR+ RAW channel start failed");
13621 return rc;
13622 }
13623 }
13624
13625 LOGD("All channels started");
13626 return rc;
13627}
13628
13629/*===========================================================================
13630 * FUNCTION : notifyErrorForPendingRequests
13631 *
13632 * DESCRIPTION: This function sends error for all the pending requests/buffers
13633 *
13634 * PARAMETERS : None
13635 *
13636 * RETURN : Error codes
13637 * NO_ERROR on success
13638 *
13639 *==========================================================================*/
13640int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13641{
13642 int32_t rc = NO_ERROR;
13643 unsigned int frameNum = 0;
13644 camera3_capture_result_t result;
13645 camera3_stream_buffer_t *pStream_Buf = NULL;
13646
13647 memset(&result, 0, sizeof(camera3_capture_result_t));
13648
13649 if (mPendingRequestsList.size() > 0) {
13650 pendingRequestIterator i = mPendingRequestsList.begin();
13651 frameNum = i->frame_number;
13652 } else {
13653 /* There might still be pending buffers even though there are
13654 no pending requests. Setting the frameNum to MAX so that
13655 all the buffers with smaller frame numbers are returned */
13656 frameNum = UINT_MAX;
13657 }
13658
13659 LOGH("Oldest frame num on mPendingRequestsList = %u",
13660 frameNum);
13661
13662 notifyErrorFoPendingDepthData(mDepthChannel);
13663
13664 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13665 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13666
13667 if (req->frame_number < frameNum) {
13668 // Send Error notify to frameworks for each buffer for which
13669 // metadata buffer is already sent
13670 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13671 req->frame_number, req->mPendingBufferList.size());
13672
13673 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13674 if (NULL == pStream_Buf) {
13675 LOGE("No memory for pending buffers array");
13676 return NO_MEMORY;
13677 }
13678 memset(pStream_Buf, 0,
13679 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13680 result.result = NULL;
13681 result.frame_number = req->frame_number;
13682 result.num_output_buffers = req->mPendingBufferList.size();
13683 result.output_buffers = pStream_Buf;
13684
13685 size_t index = 0;
13686 for (auto info = req->mPendingBufferList.begin();
13687 info != req->mPendingBufferList.end(); ) {
13688
13689 camera3_notify_msg_t notify_msg;
13690 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13691 notify_msg.type = CAMERA3_MSG_ERROR;
13692 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13693 notify_msg.message.error.error_stream = info->stream;
13694 notify_msg.message.error.frame_number = req->frame_number;
13695 pStream_Buf[index].acquire_fence = -1;
13696 pStream_Buf[index].release_fence = -1;
13697 pStream_Buf[index].buffer = info->buffer;
13698 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13699 pStream_Buf[index].stream = info->stream;
13700 orchestrateNotify(&notify_msg);
13701 index++;
13702 // Remove buffer from list
13703 info = req->mPendingBufferList.erase(info);
13704 }
13705
13706 // Remove this request from Map
13707 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13708 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13709 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13710
13711 orchestrateResult(&result);
13712
13713 delete [] pStream_Buf;
13714 } else {
13715
13716 // Go through the pending requests info and send error request to framework
13717 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13718
13719 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13720
13721 // Send error notify to frameworks
13722 camera3_notify_msg_t notify_msg;
13723 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13724 notify_msg.type = CAMERA3_MSG_ERROR;
13725 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13726 notify_msg.message.error.error_stream = NULL;
13727 notify_msg.message.error.frame_number = req->frame_number;
13728 orchestrateNotify(&notify_msg);
13729
13730 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13731 if (NULL == pStream_Buf) {
13732 LOGE("No memory for pending buffers array");
13733 return NO_MEMORY;
13734 }
13735 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13736
13737 result.result = NULL;
13738 result.frame_number = req->frame_number;
13739 result.input_buffer = i->input_buffer;
13740 result.num_output_buffers = req->mPendingBufferList.size();
13741 result.output_buffers = pStream_Buf;
13742
13743 size_t index = 0;
13744 for (auto info = req->mPendingBufferList.begin();
13745 info != req->mPendingBufferList.end(); ) {
13746 pStream_Buf[index].acquire_fence = -1;
13747 pStream_Buf[index].release_fence = -1;
13748 pStream_Buf[index].buffer = info->buffer;
13749 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13750 pStream_Buf[index].stream = info->stream;
13751 index++;
13752 // Remove buffer from list
13753 info = req->mPendingBufferList.erase(info);
13754 }
13755
13756 // Remove this request from Map
13757 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13758 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13759 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13760
13761 orchestrateResult(&result);
13762 delete [] pStream_Buf;
13763 i = erasePendingRequest(i);
13764 }
13765 }
13766
13767 /* Reset pending frame Drop list and requests list */
13768 mPendingFrameDropList.clear();
13769
13770 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13771 req.mPendingBufferList.clear();
13772 }
13773 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13774 LOGH("Cleared all the pending buffers ");
13775
13776 return rc;
13777}
13778
13779bool QCamera3HardwareInterface::isOnEncoder(
13780 const cam_dimension_t max_viewfinder_size,
13781 uint32_t width, uint32_t height)
13782{
13783 return ((width > (uint32_t)max_viewfinder_size.width) ||
13784 (height > (uint32_t)max_viewfinder_size.height) ||
13785 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13786 (height > (uint32_t)VIDEO_4K_HEIGHT));
13787}
13788
13789/*===========================================================================
13790 * FUNCTION : setBundleInfo
13791 *
13792 * DESCRIPTION: Set bundle info for all streams that are bundled.
13793 *
13794 * PARAMETERS : None
13795 *
13796 * RETURN : NO_ERROR on success
13797 * Error codes on failure
13798 *==========================================================================*/
13799int32_t QCamera3HardwareInterface::setBundleInfo()
13800{
13801 int32_t rc = NO_ERROR;
13802
13803 if (mChannelHandle) {
13804 cam_bundle_config_t bundleInfo;
13805 memset(&bundleInfo, 0, sizeof(bundleInfo));
13806 rc = mCameraHandle->ops->get_bundle_info(
13807 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13808 if (rc != NO_ERROR) {
13809 LOGE("get_bundle_info failed");
13810 return rc;
13811 }
13812 if (mAnalysisChannel) {
13813 mAnalysisChannel->setBundleInfo(bundleInfo);
13814 }
13815 if (mSupportChannel) {
13816 mSupportChannel->setBundleInfo(bundleInfo);
13817 }
13818 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13819 it != mStreamInfo.end(); it++) {
13820 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13821 channel->setBundleInfo(bundleInfo);
13822 }
13823 if (mRawDumpChannel) {
13824 mRawDumpChannel->setBundleInfo(bundleInfo);
13825 }
13826 if (mHdrPlusRawSrcChannel) {
13827 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13828 }
13829 }
13830
13831 return rc;
13832}
13833
13834/*===========================================================================
13835 * FUNCTION : setInstantAEC
13836 *
13837 * DESCRIPTION: Set Instant AEC related params.
13838 *
13839 * PARAMETERS :
13840 * @meta: CameraMetadata reference
13841 *
13842 * RETURN : NO_ERROR on success
13843 * Error codes on failure
13844 *==========================================================================*/
13845int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13846{
13847 int32_t rc = NO_ERROR;
13848 uint8_t val = 0;
13849 char prop[PROPERTY_VALUE_MAX];
13850
13851 // First try to configure instant AEC from framework metadata
13852 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13853 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13854 }
13855
13856 // If framework did not set this value, try to read from set prop.
13857 if (val == 0) {
13858 memset(prop, 0, sizeof(prop));
13859 property_get("persist.camera.instant.aec", prop, "0");
13860 val = (uint8_t)atoi(prop);
13861 }
13862
13863 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13864 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13866 mInstantAEC = val;
13867 mInstantAECSettledFrameNumber = 0;
13868 mInstantAecFrameIdxCount = 0;
13869 LOGH("instantAEC value set %d",val);
13870 if (mInstantAEC) {
13871 memset(prop, 0, sizeof(prop));
13872 property_get("persist.camera.ae.instant.bound", prop, "10");
13873 int32_t aec_frame_skip_cnt = atoi(prop);
13874 if (aec_frame_skip_cnt >= 0) {
13875 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13876 } else {
13877 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13878 rc = BAD_VALUE;
13879 }
13880 }
13881 } else {
13882 LOGE("Bad instant aec value set %d", val);
13883 rc = BAD_VALUE;
13884 }
13885 return rc;
13886}
13887
13888/*===========================================================================
13889 * FUNCTION : get_num_overall_buffers
13890 *
13891 * DESCRIPTION: Return the total number of pending buffers across all requests.
13892 *
13893 * PARAMETERS : None
13894 *
13895 * RETURN : Number of overall pending buffers
13896 *
13897 *==========================================================================*/
13898uint32_t PendingBuffersMap::get_num_overall_buffers()
13899{
13900 uint32_t sum_buffers = 0;
13901 for (auto &req : mPendingBuffersInRequest) {
13902 sum_buffers += req.mPendingBufferList.size();
13903 }
13904 return sum_buffers;
13905}
13906
13907/*===========================================================================
13908 * FUNCTION : removeBuf
13909 *
13910 * DESCRIPTION: Remove a matching buffer from tracker.
13911 *
13912 * PARAMETERS : @buffer: image buffer for the callback
13913 *
13914 * RETURN : None
13915 *
13916 *==========================================================================*/
13917void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13918{
13919 bool buffer_found = false;
13920 for (auto req = mPendingBuffersInRequest.begin();
13921 req != mPendingBuffersInRequest.end(); req++) {
13922 for (auto k = req->mPendingBufferList.begin();
13923 k != req->mPendingBufferList.end(); k++ ) {
13924 if (k->buffer == buffer) {
13925 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13926 req->frame_number, buffer);
13927 k = req->mPendingBufferList.erase(k);
13928 if (req->mPendingBufferList.empty()) {
13929 // Remove this request from Map
13930 req = mPendingBuffersInRequest.erase(req);
13931 }
13932 buffer_found = true;
13933 break;
13934 }
13935 }
13936 if (buffer_found) {
13937 break;
13938 }
13939 }
13940 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13941 get_num_overall_buffers());
13942}
13943
13944/*===========================================================================
13945 * FUNCTION : getBufErrStatus
13946 *
13947 * DESCRIPTION: get buffer error status
13948 *
13949 * PARAMETERS : @buffer: buffer handle
13950 *
13951 * RETURN : Error status
13952 *
13953 *==========================================================================*/
13954int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13955{
13956 for (auto& req : mPendingBuffersInRequest) {
13957 for (auto& k : req.mPendingBufferList) {
13958 if (k.buffer == buffer)
13959 return k.bufStatus;
13960 }
13961 }
13962 return CAMERA3_BUFFER_STATUS_OK;
13963}
13964
13965/*===========================================================================
13966 * FUNCTION : setPAAFSupport
13967 *
13968 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13969 * feature mask according to stream type and filter
13970 * arrangement
13971 *
13972 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13973 * @stream_type: stream type
13974 * @filter_arrangement: filter arrangement
13975 *
13976 * RETURN : None
13977 *==========================================================================*/
13978void QCamera3HardwareInterface::setPAAFSupport(
13979 cam_feature_mask_t& feature_mask,
13980 cam_stream_type_t stream_type,
13981 cam_color_filter_arrangement_t filter_arrangement)
13982{
13983 switch (filter_arrangement) {
13984 case CAM_FILTER_ARRANGEMENT_RGGB:
13985 case CAM_FILTER_ARRANGEMENT_GRBG:
13986 case CAM_FILTER_ARRANGEMENT_GBRG:
13987 case CAM_FILTER_ARRANGEMENT_BGGR:
13988 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13989 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
13990 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13991 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13992 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13993 }
13994 break;
13995 case CAM_FILTER_ARRANGEMENT_Y:
13996 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13997 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13998 }
13999 break;
14000 default:
14001 break;
14002 }
14003 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14004 feature_mask, stream_type, filter_arrangement);
14005
14006
14007}
14008
14009/*===========================================================================
14010* FUNCTION : getSensorMountAngle
14011*
14012* DESCRIPTION: Retrieve sensor mount angle
14013*
14014* PARAMETERS : None
14015*
14016* RETURN : sensor mount angle in uint32_t
14017*==========================================================================*/
14018uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14019{
14020 return gCamCapability[mCameraId]->sensor_mount_angle;
14021}
14022
14023/*===========================================================================
14024* FUNCTION : getRelatedCalibrationData
14025*
14026* DESCRIPTION: Retrieve related system calibration data
14027*
14028* PARAMETERS : None
14029*
14030* RETURN : Pointer of related system calibration data
14031*==========================================================================*/
14032const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14033{
14034 return (const cam_related_system_calibration_data_t *)
14035 &(gCamCapability[mCameraId]->related_cam_calibration);
14036}
14037
14038/*===========================================================================
14039 * FUNCTION : is60HzZone
14040 *
14041 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14042 *
14043 * PARAMETERS : None
14044 *
14045 * RETURN : True if in 60Hz zone, False otherwise
14046 *==========================================================================*/
14047bool QCamera3HardwareInterface::is60HzZone()
14048{
14049 time_t t = time(NULL);
14050 struct tm lt;
14051
14052 struct tm* r = localtime_r(&t, &lt);
14053
14054 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14055 return true;
14056 else
14057 return false;
14058}
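
// Illustrative example (assumed time zones, not from the source): with
// TZ=America/Los_Angeles (UTC-8, tm_gmtoff = -28800) the first bound
// (-28800 <= -7200) is hit and the function returns true (60Hz zone);
// with TZ=Europe/Berlin (UTC+1, tm_gmtoff = 3600) neither bound is hit
// and it returns false (50Hz assumed).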
14059
14060/*===========================================================================
14061 * FUNCTION : adjustBlackLevelForCFA
14062 *
14063 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14064 * of bayer CFA (Color Filter Array).
14065 *
14066 * PARAMETERS : @input: black level pattern in the order of RGGB
14067 * @output: black level pattern in the order of CFA
14068 * @color_arrangement: CFA color arrangement
14069 *
14070 * RETURN : None
14071 *==========================================================================*/
14072template<typename T>
14073void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14074 T input[BLACK_LEVEL_PATTERN_CNT],
14075 T output[BLACK_LEVEL_PATTERN_CNT],
14076 cam_color_filter_arrangement_t color_arrangement)
14077{
14078 switch (color_arrangement) {
14079 case CAM_FILTER_ARRANGEMENT_GRBG:
14080 output[0] = input[1];
14081 output[1] = input[0];
14082 output[2] = input[3];
14083 output[3] = input[2];
14084 break;
14085 case CAM_FILTER_ARRANGEMENT_GBRG:
14086 output[0] = input[2];
14087 output[1] = input[3];
14088 output[2] = input[0];
14089 output[3] = input[1];
14090 break;
14091 case CAM_FILTER_ARRANGEMENT_BGGR:
14092 output[0] = input[3];
14093 output[1] = input[2];
14094 output[2] = input[1];
14095 output[3] = input[0];
14096 break;
14097 case CAM_FILTER_ARRANGEMENT_RGGB:
14098 output[0] = input[0];
14099 output[1] = input[1];
14100 output[2] = input[2];
14101 output[3] = input[3];
14102 break;
14103 default:
14104 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14105 break;
14106 }
14107}
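
// Worked example (illustrative): with input = {R, Gr, Gb, B} given in
// RGGB order and color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the
// mapping above produces output = {Gr, R, B, Gb}, i.e. the black level
// entries are reordered to match the sensor's CFA readout order.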
14108
14109void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14110 CameraMetadata &resultMetadata,
14111 std::shared_ptr<metadata_buffer_t> settings)
14112{
14113 if (settings == nullptr) {
14114 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14115 return;
14116 }
14117
14118 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14119 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14120 }
14121
14122 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14123 String8 str((const char *)gps_methods);
14124 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14125 }
14126
14127 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14128 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14129 }
14130
14131 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14132 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14133 }
14134
14135 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14136 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14137 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14138 }
14139
14140 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14141 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14142 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14143 }
14144
14145 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14146 int32_t fwk_thumb_size[2];
14147 fwk_thumb_size[0] = thumb_size->width;
14148 fwk_thumb_size[1] = thumb_size->height;
14149 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14150 }
14151
14152 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14153 uint8_t fwk_intent = intent[0];
14154 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14155 }
14156}
14157
14158bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
14159 const camera3_capture_request_t &request, const CameraMetadata &metadata)
14160{
14161 if (hdrPlusRequest == nullptr) return false;
14162
14163 // Check noise reduction mode is high quality.
14164 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14165 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14166 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14167 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14168 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14169 return false;
14170 }
14171
14172 // Check edge mode is high quality.
14173 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14174 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14175 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14176 return false;
14177 }
14178
14179 if (request.num_output_buffers != 1 ||
14180 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14181 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14182 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14183 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14184 request.output_buffers[i].stream->width,
14185 request.output_buffers[i].stream->height,
14186 request.output_buffers[i].stream->format);
14187 }
14188 return false;
14189 }
14190
14191 // Get a YUV buffer from pic channel.
14192 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14193 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14194 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14195 if (res != OK) {
14196 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14197 __FUNCTION__, strerror(-res), res);
14198 return false;
14199 }
14200
14201 pbcamera::StreamBuffer buffer;
14202 buffer.streamId = kPbYuvOutputStreamId;
14203 buffer.dmaBufFd = yuvBuffer->fd;
14204 buffer.data = yuvBuffer->buffer;
14205 buffer.dataSize = yuvBuffer->frame_len;
14206
14207 pbcamera::CaptureRequest pbRequest;
14208 pbRequest.id = request.frame_number;
14209 pbRequest.outputBuffers.push_back(buffer);
14210
14211 // Submit an HDR+ capture request to HDR+ service.
14212 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14213 if (res != OK) {
14214 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14215 strerror(-res), res);
14216 return false;
14217 }
14218
14219 hdrPlusRequest->yuvBuffer = yuvBuffer;
14220 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14221
14222 return true;
14223}
14224
14225status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14226{
14227 if (gHdrPlusClient == nullptr) {
14228 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
14229 return -ENODEV;
14230 }
14231
14232 status_t res;
14233
14234 // Connect to HDR+ service if it's not connected yet.
14235 pthread_mutex_lock(&gCamLock);
14236 if (!gEaselConnected) {
14237 // Connect to HDR+ service
14238 res = gHdrPlusClient->connect(this);
14239 if (res != OK) {
14240 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14241 strerror(-res), res);
14242 pthread_mutex_unlock(&gCamLock);
14243 return res;
14244 }
14245
14246 // Set static metadata.
14247 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14248 if (res != OK) {
14249 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14250 strerror(-res), res);
14251 gHdrPlusClient->disconnect();
14252 pthread_mutex_unlock(&gCamLock);
14253 return res;
14254 }
14255 gEaselConnected = true;
14256 }
14257 pthread_mutex_unlock(&gCamLock);
14258
14259 // Configure stream for HDR+.
14260 res = configureHdrPlusStreamsLocked();
14261 if (res != OK) {
14262 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14263 return res;
14264 }
14265
14266 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14267 res = gHdrPlusClient->setZslHdrPlusMode(true);
14268 if (res != OK) {
14269 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14270 return res;
14271 }
14272
14273 mHdrPlusModeEnabled = true;
14274 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14275
14276 return OK;
14277}
14278
14279void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14280{
14281 // Disable HDR+ mode.
14282 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14283 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14284 if (res != OK) {
14285 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14286 }
14287 }
14288
14289 mHdrPlusModeEnabled = false;
14290 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14291}
14292
14293status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
14294{
14295 pbcamera::InputConfiguration inputConfig;
14296 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14297 status_t res = OK;
14298
14299 // Configure HDR+ client streams.
14300 // Get input config.
14301 if (mHdrPlusRawSrcChannel) {
14302 // HDR+ input buffers will be provided by HAL.
14303 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14304 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14305 if (res != OK) {
14306 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14307 __FUNCTION__, strerror(-res), res);
14308 return res;
14309 }
14310
14311 inputConfig.isSensorInput = false;
14312 } else {
14313 // Sensor MIPI will send data to Easel.
14314 inputConfig.isSensorInput = true;
14315 inputConfig.sensorMode.cameraId = mCameraId;
14316 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14317 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14318 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14319 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14320 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14321 if (mSensorModeInfo.num_raw_bits != 10) {
14322 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14323 mSensorModeInfo.num_raw_bits);
14324 return BAD_VALUE;
14325 }
14326
14327 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14328 }
14329
14330 // Get output configurations.
14331 // Easel may need to output RAW16 buffers if mRawChannel was created.
14332 // TODO: handle RAW16 outputs.
14333
14334 // Easel may need to output YUV output buffers if mPictureChannel was created.
14335 pbcamera::StreamConfiguration yuvOutputConfig;
14336 if (mPictureChannel != nullptr) {
14337 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14338 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14339 if (res != OK) {
14340 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14341 __FUNCTION__, strerror(-res), res);
14342
14343 return res;
14344 }
14345
14346 outputStreamConfigs.push_back(yuvOutputConfig);
14347 }
14348
14349 // TODO: consider other channels for YUV output buffers.
14350
14351 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14352 if (res != OK) {
14353 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14354 strerror(-res), res);
14355 return res;
14356 }
14357
14358 return OK;
14359}
14360
14361void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14362 const camera_metadata_t &resultMetadata) {
14363 if (result != nullptr) {
14364 if (result->outputBuffers.size() != 1) {
14365 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14366 result->outputBuffers.size());
14367 return;
14368 }
14369
14370 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14371 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14372 result->outputBuffers[0].streamId);
14373 return;
14374 }
14375
14376 // Find the pending HDR+ request.
14377 HdrPlusPendingRequest pendingRequest;
14378 {
14379 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14380 auto req = mHdrPlusPendingRequests.find(result->requestId);
14381 pendingRequest = req->second;
14382 }
14383
14384 // Update the result metadata with the settings of the HDR+ still capture request because
14385 // the result metadata belongs to a ZSL buffer.
14386 CameraMetadata metadata;
14387 metadata = &resultMetadata;
14388 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14389 camera_metadata_t* updatedResultMetadata = metadata.release();
14390
14391 QCamera3PicChannel *picChannel =
14392 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14393
14394 // Check if dumping HDR+ YUV output is enabled.
14395 char prop[PROPERTY_VALUE_MAX];
14396 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14397 bool dumpYuvOutput = atoi(prop);
14398
14399 if (dumpYuvOutput) {
14400 // Dump yuv buffer to a ppm file.
14401 pbcamera::StreamConfiguration outputConfig;
14402 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14403 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14404 if (rc == OK) {
14405 char buf[FILENAME_MAX] = {};
14406 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14407 result->requestId, result->outputBuffers[0].streamId,
14408 outputConfig.image.width, outputConfig.image.height);
14409
14410 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14411 } else {
14412 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14413 __FUNCTION__, strerror(-rc), rc);
14414 }
14415 }
14416
14417 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14418 auto halMetadata = std::make_shared<metadata_buffer_t>();
14419 clear_metadata_buffer(halMetadata.get());
14420
14421 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14422 // encoding.
14423 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14424 halStreamId, /*minFrameDuration*/0);
14425 if (res == OK) {
14426 // Return the buffer to pic channel for encoding.
14427 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14428 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14429 halMetadata);
14430 } else {
14431 // Return the buffer without encoding.
14432 // TODO: This should not happen but we may want to report an error buffer to camera
14433 // service.
14434 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14435 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14436 strerror(-res), res);
14437 }
14438
14439 // Send HDR+ metadata to framework.
14440 {
14441 pthread_mutex_lock(&mMutex);
14442
14443 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14444 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14445 pthread_mutex_unlock(&mMutex);
14446 }
14447
14448 // Remove the HDR+ pending request.
14449 {
14450 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14451 auto req = mHdrPlusPendingRequests.find(result->requestId);
14452 mHdrPlusPendingRequests.erase(req);
14453 }
14454 }
14455}
14456
14457void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14458 // TODO: Handle HDR+ capture failures and send the failure to framework.
14459 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14460 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14461
14462 // Return the buffer to pic channel.
14463 QCamera3PicChannel *picChannel =
14464 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14465 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14466
14467 mHdrPlusPendingRequests.erase(pendingRequest);
14468}
14469
14470}; //end namespace qcamera