/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
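// The values above presumably feed the request pipeline depth and
// android.request.partialResultCount advertised to the framework;
// FRAME_SKIP_DELAY adds extra frames of result latency on top of that.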

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
// If Easel is connected.
bool gEaselConnected;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",   CAM_CDS_MODE_ON},
    {"Off",  CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,              CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,             CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,     CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,      CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,         CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,  CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,         CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,            CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO },
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: while mapping from HAL to Android the
 * code traverses from lower to higher index, so for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
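// register_stream_buffers and get_metadata_vendor_tag_ops are intentionally left
// NULL: both callbacks are deprecated at this camera device API level (vendor tags
// are exposed through the camera module instead).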

// Initialise to an invalid (sentinel) session id
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
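// A real session id is filled in by openCamera() via get_session_id() and reset
// back to the sentinel in closeCamera().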

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks to the framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing whether this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    // Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free(*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (!mFirstConfiguration) {
            // send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest(pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch (evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    if (gHdrPlusClient != nullptr) {
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return rc;
}
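// On success the device state transitions from CLOSED to OPENED here;
// initialize() then completes the transition to INITIALIZED.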

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc(sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    // Notify display HAL that a camera session is active.
    // But avoid calling the same during bootup because camera service might open/close
    // cameras at boot time during its initialization and display service will also internally
    // wait for camera service to initialize first while calling this display API, resulting in a
    // deadlock situation. Since boot time camera open/close calls are made only to fetch
    // capabilities, no need of this display bw optimization.
    // Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        // Allocate related cam sync buffer.
        // This is needed for the payload that goes along with the bundling cmd for
        // related camera use cases.
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if (rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        // Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if (rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap, 0);
    }

    LOGH("mCameraId=%d", mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    // Notify display HAL that there is no active camera session,
    // but avoid calling the same during bootup. Refer to openCamera
    // for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        // Disable HDR+ mode.
        disableHdrPlusModeLocked();
        // Disconnect Easel if it's connected.
        pthread_mutex_lock(&gCamLock);
        if (gEaselConnected) {
            gHdrPlusClient->disconnect();
            gEaselConnected = false;
        }
        pthread_mutex_unlock(&gCamLock);

        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback functions to the framework
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
1111
1112/*===========================================================================
1113 * FUNCTION : validateStreamDimensions
1114 *
1115 * DESCRIPTION: Check if the configuration requested are those advertised
1116 *
1117 * PARAMETERS :
1118 * @stream_list : streams to be configured
1119 *
1120 * RETURN :
1121 *
1122 *==========================================================================*/
1123int QCamera3HardwareInterface::validateStreamDimensions(
1124 camera3_stream_configuration_t *streamList)
1125{
1126 int rc = NO_ERROR;
1127 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001128 uint32_t depthWidth = 0;
1129 uint32_t depthHeight = 0;
1130 if (mPDSupported) {
1131 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1132 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1133 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001134
1135 camera3_stream_t *inputStream = NULL;
1136 /*
1137 * Loop through all streams to find input stream if it exists*
1138 */
1139 for (size_t i = 0; i< streamList->num_streams; i++) {
1140 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1141 if (inputStream != NULL) {
1142 LOGE("Error, Multiple input streams requested");
1143 return -EINVAL;
1144 }
1145 inputStream = streamList->streams[i];
1146 }
1147 }
1148 /*
1149 * Loop through all streams requested in configuration
1150 * Check if unsupported sizes have been requested on any of them
1151 */
1152 for (size_t j = 0; j < streamList->num_streams; j++) {
1153 bool sizeFound = false;
1154 camera3_stream_t *newStream = streamList->streams[j];
1155
1156 uint32_t rotatedHeight = newStream->height;
1157 uint32_t rotatedWidth = newStream->width;
1158 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1159 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1160 rotatedHeight = newStream->width;
1161 rotatedWidth = newStream->height;
1162 }
1163
1164 /*
1165 * Sizes are different for each type of stream format check against
1166 * appropriate table.
1167 */
1168 switch (newStream->format) {
1169 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1170 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1171 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001172 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1173 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1174 mPDSupported) {
1175 if ((depthWidth == newStream->width) &&
1176 (depthHeight == newStream->height)) {
1177 sizeFound = true;
1178 }
1179 break;
1180 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001181 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1182 for (size_t i = 0; i < count; i++) {
1183 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1184 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1185 sizeFound = true;
1186 break;
1187 }
1188 }
1189 break;
1190 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001191 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1192 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001193 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001194 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001195 if ((depthSamplesCount == newStream->width) &&
1196 (1 == newStream->height)) {
1197 sizeFound = true;
1198 }
1199 break;
1200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001201 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1202 /* Verify set size against generated sizes table */
1203 for (size_t i = 0; i < count; i++) {
1204 if (((int32_t)rotatedWidth ==
1205 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1206 ((int32_t)rotatedHeight ==
1207 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1208 sizeFound = true;
1209 break;
1210 }
1211 }
1212 break;
1213 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1214 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1215 default:
1216 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1217 || newStream->stream_type == CAMERA3_STREAM_INPUT
1218 || IS_USAGE_ZSL(newStream->usage)) {
1219 if (((int32_t)rotatedWidth ==
1220 gCamCapability[mCameraId]->active_array_size.width) &&
1221 ((int32_t)rotatedHeight ==
1222 gCamCapability[mCameraId]->active_array_size.height)) {
1223 sizeFound = true;
1224 break;
1225 }
1226 /* We could potentially break here to enforce ZSL stream
1227 * set from frameworks always is full active array size
1228 * but it is not clear from the spc if framework will always
1229 * follow that, also we have logic to override to full array
1230 * size, so keeping the logic lenient at the moment
1231 */
1232 }
1233 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1234 MAX_SIZES_CNT);
1235 for (size_t i = 0; i < count; i++) {
1236 if (((int32_t)rotatedWidth ==
1237 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1238 ((int32_t)rotatedHeight ==
1239 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1240 sizeFound = true;
1241 break;
1242 }
1243 }
1244 break;
1245 } /* End of switch(newStream->format) */
1246
1247 /* We error out even if a single stream has unsupported size set */
1248 if (!sizeFound) {
1249 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1250 rotatedWidth, rotatedHeight, newStream->format,
1251 gCamCapability[mCameraId]->active_array_size.width,
1252 gCamCapability[mCameraId]->active_array_size.height);
1253 rc = -EINVAL;
1254 break;
1255 }
1256 } /* End of for each stream */
1257 return rc;
1258}
1259
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001260/*===========================================================================
1261 * FUNCTION : validateUsageFlags
1262 *
1263 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1264 *
1265 * PARAMETERS :
1266 * @stream_list : streams to be configured
1267 *
1268 * RETURN :
1269 * NO_ERROR if the usage flags are supported
1270 * error code if usage flags are not supported
1271 *
1272 *==========================================================================*/
1273int QCamera3HardwareInterface::validateUsageFlags(
1274 const camera3_stream_configuration_t* streamList)
1275{
1276 for (size_t j = 0; j < streamList->num_streams; j++) {
1277 const camera3_stream_t *newStream = streamList->streams[j];
1278
1279 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1280 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1281 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1282 continue;
1283 }
1284
1285 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1286 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1287 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1288 bool forcePreviewUBWC = true;
1289 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1290 forcePreviewUBWC = false;
1291 }
1292 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1293 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1294 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1295 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1296 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1297 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1298
1299 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1300 // So color spaces will always match.
1301
1302 // Check whether underlying formats of shared streams match.
1303 if (isVideo && isPreview && videoFormat != previewFormat) {
1304 LOGE("Combined video and preview usage flag is not supported");
1305 return -EINVAL;
1306 }
1307 if (isPreview && isZSL && previewFormat != zslFormat) {
1308 LOGE("Combined preview and zsl usage flag is not supported");
1309 return -EINVAL;
1310 }
1311 if (isVideo && isZSL && videoFormat != zslFormat) {
1312 LOGE("Combined video and zsl usage flag is not supported");
1313 return -EINVAL;
1314 }
1315 }
1316 return NO_ERROR;
1317}
1318
1319/*===========================================================================
1320 * FUNCTION : validateUsageFlagsForEis
1321 *
1322 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1323 *
1324 * PARAMETERS :
1325 * @stream_list : streams to be configured
1326 *
1327 * RETURN :
1328 * NO_ERROR if the usage flags are supported
1329 * error code if usage flags are not supported
1330 *
1331 *==========================================================================*/
1332int QCamera3HardwareInterface::validateUsageFlagsForEis(
1333 const camera3_stream_configuration_t* streamList)
1334{
1335 for (size_t j = 0; j < streamList->num_streams; j++) {
1336 const camera3_stream_t *newStream = streamList->streams[j];
1337
1338 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1339 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1340
1341 // Because EIS is "hard-coded" for certain use case, and current
1342 // implementation doesn't support shared preview and video on the same
1343 // stream, return failure if EIS is forced on.
1344 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1345 LOGE("Combined video and preview usage flag is not supported due to EIS");
1346 return -EINVAL;
1347 }
1348 }
1349 return NO_ERROR;
1350}
1351
Thierry Strudel3d639192016-09-09 11:52:26 -07001352/*==============================================================================
1353 * FUNCTION : isSupportChannelNeeded
1354 *
1355 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1356 *
1357 * PARAMETERS :
1358 * @stream_list : streams to be configured
1359 * @stream_config_info : the config info for streams to be configured
1360 *
1361 * RETURN : Boolen true/false decision
1362 *
1363 *==========================================================================*/
1364bool QCamera3HardwareInterface::isSupportChannelNeeded(
1365 camera3_stream_configuration_t *streamList,
1366 cam_stream_size_info_t stream_config_info)
1367{
1368 uint32_t i;
1369 bool pprocRequested = false;
1370 /* Check for conditions where PProc pipeline does not have any streams*/
1371 for (i = 0; i < stream_config_info.num_streams; i++) {
1372 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1373 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1374 pprocRequested = true;
1375 break;
1376 }
1377 }
1378
1379 if (pprocRequested == false )
1380 return true;
1381
1382 /* Dummy stream needed if only raw or jpeg streams present */
1383 for (i = 0; i < streamList->num_streams; i++) {
1384 switch(streamList->streams[i]->format) {
1385 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1386 case HAL_PIXEL_FORMAT_RAW10:
1387 case HAL_PIXEL_FORMAT_RAW16:
1388 case HAL_PIXEL_FORMAT_BLOB:
1389 break;
1390 default:
1391 return false;
1392 }
1393 }
1394 return true;
1395}
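/* Illustrative sketch (not part of the HAL build): the heuristic above reduces to
 * two questions - "does any non-analysis stream request post-processing?" and
 * "are all app-visible streams RAW or BLOB?". A compact standalone restatement
 * with hypothetical inputs, kept out of the real configuration path:
 */
__attribute__((unused))
static bool sketchNeedsSupportChannel(const bool ppRequested[], size_t backendStreamCount,
        const bool isRawOrBlob[], size_t appStreamCount)
{
    bool anyPProcStream = false;
    for (size_t idx = 0; idx < backendStreamCount; idx++) {
        if (ppRequested[idx]) {
            anyPProcStream = true;
            break;
        }
    }
    if (!anyPProcStream) {
        return true;  // PProc pipeline would otherwise have no streams
    }
    for (size_t idx = 0; idx < appStreamCount; idx++) {
        if (!isRawOrBlob[idx]) {
            return false;  // at least one processed stream exists
        }
    }
    return true;  // only raw/jpeg streams present, a dummy stream is needed
}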
1396
1397/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001398 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001399 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001400 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001401 *
1402 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001403 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001404 *
1405 * RETURN : int32_t type of status
1406 * NO_ERROR -- success
1407 * non-zero failure code
1408 *
1409 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001410int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001411{
1412 int32_t rc = NO_ERROR;
1413
1414 cam_dimension_t max_dim = {0, 0};
1415 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1416 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1417 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1418 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1419 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1420 }
1421
1422 clear_metadata_buffer(mParameters);
1423
1424 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1425 max_dim);
1426 if (rc != NO_ERROR) {
1427 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1428 return rc;
1429 }
1430
1431 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1432 if (rc != NO_ERROR) {
1433 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1434 return rc;
1435 }
1436
1437 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001439
1440 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1441 mParameters);
1442 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 return rc;
1445 }
1446
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001447 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001448 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1449 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1450 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1451 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1452 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001453
1454 return rc;
1455}
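/* Illustrative sketch (not part of the HAL build): before querying the sensor mode,
 * the function above folds every configured stream size into a single bounding
 * dimension (CAM_INTF_PARM_MAX_DIMENSION). The reduction itself is just a per-axis
 * maximum, shown here as a standalone helper with hypothetical parameters:
 */
__attribute__((unused))
static void sketchBoundingDimension(const int32_t widths[], const int32_t heights[],
        size_t count, int32_t &maxWidth, int32_t &maxHeight)
{
    maxWidth = 0;
    maxHeight = 0;
    for (size_t idx = 0; idx < count; idx++) {
        if (widths[idx] > maxWidth) {
            maxWidth = widths[idx];
        }
        if (heights[idx] > maxHeight) {
            maxHeight = heights[idx];
        }
    }
}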
1456
1457/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 * FUNCTION : addToPPFeatureMask
1459 *
1460 * DESCRIPTION: add additional features to pp feature mask based on
1461 * stream type and usecase
1462 *
1463 * PARAMETERS :
1464 * @stream_format : stream type for feature mask
1465 * @stream_idx : stream idx within postprocess_mask list to change
1466 *
1467 * RETURN : NULL
1468 *
1469 *==========================================================================*/
1470void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1471 uint32_t stream_idx)
1472{
1473 char feature_mask_value[PROPERTY_VALUE_MAX];
1474 cam_feature_mask_t feature_mask;
1475 int args_converted;
1476 int property_len;
1477
1478 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001479#ifdef _LE_CAMERA_
1480 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1481 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1482 property_len = property_get("persist.camera.hal3.feature",
1483 feature_mask_value, swtnr_feature_mask_value);
1484#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001485 property_len = property_get("persist.camera.hal3.feature",
1486 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001487#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1489 (feature_mask_value[1] == 'x')) {
1490 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1491 } else {
1492 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1493 }
1494 if (1 != args_converted) {
1495 feature_mask = 0;
1496 LOGE("Wrong feature mask %s", feature_mask_value);
1497 return;
1498 }
1499
1500 switch (stream_format) {
1501 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1502 /* Add LLVD to pp feature mask only if video hint is enabled */
1503 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1504 mStreamConfigInfo.postprocess_mask[stream_idx]
1505 |= CAM_QTI_FEATURE_SW_TNR;
1506 LOGH("Added SW TNR to pp feature mask");
1507 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1508 mStreamConfigInfo.postprocess_mask[stream_idx]
1509 |= CAM_QCOM_FEATURE_LLVD;
1510 LOGH("Added LLVD SeeMore to pp feature mask");
1511 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001512 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1513 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1514 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1515 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001516 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1517 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1518 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1519 CAM_QTI_FEATURE_BINNING_CORRECTION;
1520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001521 break;
1522 }
1523 default:
1524 break;
1525 }
1526 LOGD("PP feature mask %llx",
1527 mStreamConfigInfo.postprocess_mask[stream_idx]);
1528}
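/* Illustrative sketch (not part of the HAL build): the persist.camera.hal3.feature
 * parse at the top of addToPPFeatureMask() accepts either a hex value with a "0x"
 * prefix or a plain decimal value, and treats a failed parse as "no extra
 * features". A standalone equivalent with a hypothetical helper name:
 */
__attribute__((unused))
static uint64_t sketchParseFeatureMaskProperty(const char *value)
{
    unsigned long long mask = 0;
    int converted;
    if ((value[0] == '0') && (value[1] == 'x')) {
        converted = sscanf(value, "0x%llx", &mask);
    } else {
        converted = sscanf(value, "%llu", &mask);
    }
    return (1 == converted) ? (uint64_t)mask : 0;
}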
1529
1530/*==============================================================================
1531 * FUNCTION : updateFpsInPreviewBuffer
1532 *
1533 * DESCRIPTION: update FPS information in preview buffer.
1534 *
1535 * PARAMETERS :
1536 * @metadata : pointer to metadata buffer
1537 * @frame_number: frame_number to look for in pending buffer list
1538 *
1539 * RETURN : None
1540 *
1541 *==========================================================================*/
1542void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1543 uint32_t frame_number)
1544{
1545 // Mark all pending buffers for this particular request
1546 // with corresponding framerate information
1547 for (List<PendingBuffersInRequest>::iterator req =
1548 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1549 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1550 for(List<PendingBufferInfo>::iterator j =
1551 req->mPendingBufferList.begin();
1552 j != req->mPendingBufferList.end(); j++) {
1553 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1554 if ((req->frame_number == frame_number) &&
1555 (channel->getStreamTypeMask() &
1556 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1557 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1558 CAM_INTF_PARM_FPS_RANGE, metadata) {
1559 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1560 struct private_handle_t *priv_handle =
1561 (struct private_handle_t *)(*(j->buffer));
1562 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1563 }
1564 }
1565 }
1566 }
1567}
1568
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001569/*==============================================================================
1570 * FUNCTION : updateTimeStampInPendingBuffers
1571 *
1572 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1573 * of a frame number
1574 *
1575 * PARAMETERS :
1576 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1577 * @timestamp : timestamp to be set
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1583 uint32_t frameNumber, nsecs_t timestamp)
1584{
1585 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1586 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1587 if (req->frame_number != frameNumber)
1588 continue;
1589
1590 for (auto k = req->mPendingBufferList.begin();
1591 k != req->mPendingBufferList.end(); k++ ) {
1592 struct private_handle_t *priv_handle =
1593 (struct private_handle_t *) (*(k->buffer));
1594 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1595 }
1596 }
1597 return;
1598}
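/* Illustrative sketch (not part of the HAL build): updateFpsInPreviewBuffer() and
 * updateTimeStampInPendingBuffers() share the same walk - locate the pending
 * request(s) matching a frame number, then apply a per-buffer update. With
 * hypothetical flattened types the pattern reduces to:
 */
namespace {
struct SketchPendingBuffer {
    void *handle;
};
struct SketchPendingRequest {
    uint32_t frameNumber;
    SketchPendingBuffer *buffers;
    size_t bufferCount;
};

__attribute__((unused))
void sketchForEachBufferOfFrame(SketchPendingRequest requests[], size_t requestCount,
        uint32_t frameNumber, void (*apply)(SketchPendingBuffer &))
{
    for (size_t r = 0; r < requestCount; r++) {
        if (requests[r].frameNumber != frameNumber) {
            continue;
        }
        for (size_t b = 0; b < requests[r].bufferCount; b++) {
            apply(requests[r].buffers[b]);
        }
    }
}
} // namespace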
1599
Thierry Strudel3d639192016-09-09 11:52:26 -07001600/*===========================================================================
1601 * FUNCTION : configureStreams
1602 *
1603 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1604 * and output streams.
1605 *
1606 * PARAMETERS :
1607 * @stream_list : streams to be configured
1608 *
1609 * RETURN :
1610 *
1611 *==========================================================================*/
1612int QCamera3HardwareInterface::configureStreams(
1613 camera3_stream_configuration_t *streamList)
1614{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001615 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 int rc = 0;
1617
1618 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001619 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001621 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001622
1623 return rc;
1624}
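/* Illustrative sketch (hypothetical, not how this HAL is written): the explicit
 * acquire/release pair above could equivalently be expressed as a scoped guard,
 * which also releases the perf lock on early-return paths. Shown only as a design
 * note; mPerfLockMgr is actually driven through the acquirePerfLock()/releasePerfLock()
 * calls as in the code above.
 */
namespace {
template <typename Mgr, typename LockId>
class SketchScopedPerfLock {
public:
    SketchScopedPerfLock(Mgr &mgr, LockId id) : mMgr(mgr), mId(id) {
        mMgr.acquirePerfLock(mId);
    }
    ~SketchScopedPerfLock() {
        mMgr.releasePerfLock(mId);
    }
    SketchScopedPerfLock(const SketchScopedPerfLock &) = delete;
    SketchScopedPerfLock &operator=(const SketchScopedPerfLock &) = delete;
private:
    Mgr &mMgr;
    LockId mId;
};
} // namespace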
1625
1626/*===========================================================================
1627 * FUNCTION : configureStreamsPerfLocked
1628 *
1629 * DESCRIPTION: configureStreams while perfLock is held.
1630 *
1631 * PARAMETERS :
1632 * @stream_list : streams to be configured
1633 *
1634 * RETURN : int32_t type of status
1635 * NO_ERROR -- success
1636 * non-zero failure code
1637 *==========================================================================*/
1638int QCamera3HardwareInterface::configureStreamsPerfLocked(
1639 camera3_stream_configuration_t *streamList)
1640{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001641 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001642 int rc = 0;
1643
1644 // Sanity check stream_list
1645 if (streamList == NULL) {
1646 LOGE("NULL stream configuration");
1647 return BAD_VALUE;
1648 }
1649 if (streamList->streams == NULL) {
1650 LOGE("NULL stream list");
1651 return BAD_VALUE;
1652 }
1653
1654 if (streamList->num_streams < 1) {
1655 LOGE("Bad number of streams requested: %d",
1656 streamList->num_streams);
1657 return BAD_VALUE;
1658 }
1659
1660 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1661 LOGE("Maximum number of streams %d exceeded: %d",
1662 MAX_NUM_STREAMS, streamList->num_streams);
1663 return BAD_VALUE;
1664 }
1665
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001666 rc = validateUsageFlags(streamList);
1667 if (rc != NO_ERROR) {
1668 return rc;
1669 }
1670
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 mOpMode = streamList->operation_mode;
1672 LOGD("mOpMode: %d", mOpMode);
1673
1674 /* first invalidate all the steams in the mStreamList
1675 * if they appear again, they will be validated */
1676 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1677 it != mStreamInfo.end(); it++) {
1678 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1679 if (channel) {
1680 channel->stop();
1681 }
1682 (*it)->status = INVALID;
1683 }
1684
1685 if (mRawDumpChannel) {
1686 mRawDumpChannel->stop();
1687 delete mRawDumpChannel;
1688 mRawDumpChannel = NULL;
1689 }
1690
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001691 if (mHdrPlusRawSrcChannel) {
1692 mHdrPlusRawSrcChannel->stop();
1693 delete mHdrPlusRawSrcChannel;
1694 mHdrPlusRawSrcChannel = NULL;
1695 }
1696
Thierry Strudel3d639192016-09-09 11:52:26 -07001697 if (mSupportChannel)
1698 mSupportChannel->stop();
1699
1700 if (mAnalysisChannel) {
1701 mAnalysisChannel->stop();
1702 }
1703 if (mMetadataChannel) {
1704 /* If mStreamInfo is not empty, there is a metadata stream */
1705 mMetadataChannel->stop();
1706 }
1707 if (mChannelHandle) {
1708 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1709 mChannelHandle);
1710 LOGD("stopping channel %d", mChannelHandle);
1711 }
1712
1713 pthread_mutex_lock(&mMutex);
1714
1715 // Check state
1716 switch (mState) {
1717 case INITIALIZED:
1718 case CONFIGURED:
1719 case STARTED:
1720 /* valid state */
1721 break;
1722 default:
1723 LOGE("Invalid state %d", mState);
1724 pthread_mutex_unlock(&mMutex);
1725 return -ENODEV;
1726 }
1727
1728 /* Check whether we have video stream */
1729 m_bIs4KVideo = false;
1730 m_bIsVideo = false;
1731 m_bEisSupportedSize = false;
1732 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001733 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001734 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001735 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001736 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 uint32_t videoWidth = 0U;
1738 uint32_t videoHeight = 0U;
1739 size_t rawStreamCnt = 0;
1740 size_t stallStreamCnt = 0;
1741 size_t processedStreamCnt = 0;
1742 // Number of streams on ISP encoder path
1743 size_t numStreamsOnEncoder = 0;
1744 size_t numYuv888OnEncoder = 0;
1745 bool bYuv888OverrideJpeg = false;
1746 cam_dimension_t largeYuv888Size = {0, 0};
1747 cam_dimension_t maxViewfinderSize = {0, 0};
1748 bool bJpegExceeds4K = false;
1749 bool bJpegOnEncoder = false;
1750 bool bUseCommonFeatureMask = false;
1751 cam_feature_mask_t commonFeatureMask = 0;
1752 bool bSmallJpegSize = false;
1753 uint32_t width_ratio;
1754 uint32_t height_ratio;
1755 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1756 camera3_stream_t *inputStream = NULL;
1757 bool isJpeg = false;
1758 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001759 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001760 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001761
1762 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1763
1764 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001765 uint8_t eis_prop_set;
1766 uint32_t maxEisWidth = 0;
1767 uint32_t maxEisHeight = 0;
1768
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001769 // Initialize all instant AEC related variables
1770 mInstantAEC = false;
1771 mResetInstantAEC = false;
1772 mInstantAECSettledFrameNumber = 0;
1773 mAecSkipDisplayFrameBound = 0;
1774 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001775 mCurrFeatureState = 0;
1776 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001777
Thierry Strudel3d639192016-09-09 11:52:26 -07001778 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1779
1780 size_t count = IS_TYPE_MAX;
1781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1782 for (size_t i = 0; i < count; i++) {
1783 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001784 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1785 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001786 break;
1787 }
1788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001789
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001790 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001791 maxEisWidth = MAX_EIS_WIDTH;
1792 maxEisHeight = MAX_EIS_HEIGHT;
1793 }
1794
1795 /* EIS setprop control */
1796 char eis_prop[PROPERTY_VALUE_MAX];
1797 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001798 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001799 eis_prop_set = (uint8_t)atoi(eis_prop);
1800
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001801 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001802 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1803
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001804 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1805 m_bEisEnable, eis_prop_set, m_bEisSupported);
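    // Note: m_bEisEnable is refined again after the stream scan below; it is
    // cleared for front/front-aux cameras and for configurations without a video stream.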
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001806
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 /* stream configurations */
1808 for (size_t i = 0; i < streamList->num_streams; i++) {
1809 camera3_stream_t *newStream = streamList->streams[i];
1810 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1811 "height = %d, rotation = %d, usage = 0x%x",
1812 i, newStream->stream_type, newStream->format,
1813 newStream->width, newStream->height, newStream->rotation,
1814 newStream->usage);
1815 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1816 newStream->stream_type == CAMERA3_STREAM_INPUT){
1817 isZsl = true;
1818 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001819 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1820 IS_USAGE_PREVIEW(newStream->usage)) {
1821 isPreview = true;
1822 }
1823
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1825 inputStream = newStream;
1826 }
1827
Emilian Peev7650c122017-01-19 08:24:33 -08001828 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1829 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 isJpeg = true;
1831 jpegSize.width = newStream->width;
1832 jpegSize.height = newStream->height;
1833 if (newStream->width > VIDEO_4K_WIDTH ||
1834 newStream->height > VIDEO_4K_HEIGHT)
1835 bJpegExceeds4K = true;
1836 }
1837
1838 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1839 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1840 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001841 // In HAL3 we can have multiple different video streams.
1842 // The videoWidth and videoHeight variables are used below as the
1843 // dimensions of the biggest of them.
1844 if (videoWidth < newStream->width ||
1845 videoHeight < newStream->height) {
1846 videoWidth = newStream->width;
1847 videoHeight = newStream->height;
1848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1850 (VIDEO_4K_HEIGHT <= newStream->height)) {
1851 m_bIs4KVideo = true;
1852 }
1853 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1854 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001855
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 }
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1859 switch (newStream->format) {
1860 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001861 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1862 depthPresent = true;
1863 break;
1864 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 stallStreamCnt++;
1866 if (isOnEncoder(maxViewfinderSize, newStream->width,
1867 newStream->height)) {
1868 numStreamsOnEncoder++;
1869 bJpegOnEncoder = true;
1870 }
1871 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1872 newStream->width);
1873 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1874 newStream->height);
1875 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1876 "FATAL: max_downscale_factor cannot be zero and so assert");
1877 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1878 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1879 LOGH("Setting small jpeg size flag to true");
1880 bSmallJpegSize = true;
1881 }
1882 break;
1883 case HAL_PIXEL_FORMAT_RAW10:
1884 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1885 case HAL_PIXEL_FORMAT_RAW16:
1886 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1888 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1889 pdStatCount++;
1890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 break;
1892 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1893 processedStreamCnt++;
1894 if (isOnEncoder(maxViewfinderSize, newStream->width,
1895 newStream->height)) {
1896 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1897 !IS_USAGE_ZSL(newStream->usage)) {
1898 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900 numStreamsOnEncoder++;
1901 }
1902 break;
1903 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1904 processedStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 // If the Yuv888 size is not greater than 4K, set the feature mask
1908 // to SUPERSET so that it supports concurrent requests on
1909 // YUV and JPEG.
1910 if (newStream->width <= VIDEO_4K_WIDTH &&
1911 newStream->height <= VIDEO_4K_HEIGHT) {
1912 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 }
1914 numStreamsOnEncoder++;
1915 numYuv888OnEncoder++;
1916 largeYuv888Size.width = newStream->width;
1917 largeYuv888Size.height = newStream->height;
1918 }
1919 break;
1920 default:
1921 processedStreamCnt++;
1922 if (isOnEncoder(maxViewfinderSize, newStream->width,
1923 newStream->height)) {
1924 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1925 numStreamsOnEncoder++;
1926 }
1927 break;
1928 }
1929
1930 }
1931 }
1932
1933 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1934 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1935 !m_bIsVideo) {
1936 m_bEisEnable = false;
1937 }
1938
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001939 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1940 pthread_mutex_unlock(&mMutex);
1941 return -EINVAL;
1942 }
1943
Thierry Strudel54dc9782017-02-15 12:12:10 -08001944 uint8_t forceEnableTnr = 0;
1945 char tnr_prop[PROPERTY_VALUE_MAX];
1946 memset(tnr_prop, 0, sizeof(tnr_prop));
1947 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1948 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 /* Logic to enable/disable TNR based on specific config size/etc.*/
1951 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1952 ((videoWidth == 1920 && videoHeight == 1080) ||
1953 (videoWidth == 1280 && videoHeight == 720)) &&
1954 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1955 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001956 else if (forceEnableTnr)
1957 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001958
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001959 char videoHdrProp[PROPERTY_VALUE_MAX];
1960 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1961 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1962 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1963
1964 if (hdr_mode_prop == 1 && m_bIsVideo &&
1965 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1966 m_bVideoHdrEnabled = true;
1967 else
1968 m_bVideoHdrEnabled = false;
1969
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 /* Check if num_streams is sane */
1972 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1973 rawStreamCnt > MAX_RAW_STREAMS ||
1974 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1975 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1976 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1977 pthread_mutex_unlock(&mMutex);
1978 return -EINVAL;
1979 }
1980 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001981 if (isZsl && m_bIs4KVideo) {
1982 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 pthread_mutex_unlock(&mMutex);
1984 return -EINVAL;
1985 }
1986 /* Check if stream sizes are sane */
1987 if (numStreamsOnEncoder > 2) {
1988 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1989 pthread_mutex_unlock(&mMutex);
1990 return -EINVAL;
1991 } else if (1 < numStreamsOnEncoder){
1992 bUseCommonFeatureMask = true;
1993 LOGH("Multiple streams above max viewfinder size, common mask needed");
1994 }
1995
1996 /* Check if BLOB size is greater than 4k in 4k recording case */
1997 if (m_bIs4KVideo && bJpegExceeds4K) {
1998 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Emilian Peev7650c122017-01-19 08:24:33 -08002003 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2004 depthPresent) {
2005 LOGE("HAL doesn't support depth streams in HFR mode!");
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2011 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2012 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2013 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2014 // configurations:
2015 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2016 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2017 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2018 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2019 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2020 __func__);
2021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024
2025 // If a jpeg stream is available, a YUV 888 stream is on the encoder path, and
2026 // the YUV stream's size is greater than the JPEG size, set the common
2027 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2028 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2029 jpegSize.width, jpegSize.height) &&
2030 largeYuv888Size.width > jpegSize.width &&
2031 largeYuv888Size.height > jpegSize.height) {
2032 bYuv888OverrideJpeg = true;
2033 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2034 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
2036
2037 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2038 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2039 commonFeatureMask);
2040 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2041 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2042
2043 rc = validateStreamDimensions(streamList);
2044 if (rc == NO_ERROR) {
2045 rc = validateStreamRotations(streamList);
2046 }
2047 if (rc != NO_ERROR) {
2048 LOGE("Invalid stream configuration requested!");
2049 pthread_mutex_unlock(&mMutex);
2050 return rc;
2051 }
2052
Emilian Peev0f3c3162017-03-15 12:57:46 +00002053 if (1 < pdStatCount) {
2054 LOGE("HAL doesn't support multiple PD streams");
2055 pthread_mutex_unlock(&mMutex);
2056 return -EINVAL;
2057 }
2058
2059 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2060 (1 == pdStatCount)) {
2061 LOGE("HAL doesn't support PD streams in HFR mode!");
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
Thierry Strudel3d639192016-09-09 11:52:26 -07002066 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2067 for (size_t i = 0; i < streamList->num_streams; i++) {
2068 camera3_stream_t *newStream = streamList->streams[i];
2069 LOGH("newStream type = %d, stream format = %d "
2070 "stream size : %d x %d, stream rotation = %d",
2071 newStream->stream_type, newStream->format,
2072 newStream->width, newStream->height, newStream->rotation);
2073 // if the stream is already in mStreamInfo, validate it
2074 bool stream_exists = false;
2075 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2076 it != mStreamInfo.end(); it++) {
2077 if ((*it)->stream == newStream) {
2078 QCamera3ProcessingChannel *channel =
2079 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2080 stream_exists = true;
2081 if (channel)
2082 delete channel;
2083 (*it)->status = VALID;
2084 (*it)->stream->priv = NULL;
2085 (*it)->channel = NULL;
2086 }
2087 }
2088 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2089 //new stream
2090 stream_info_t* stream_info;
2091 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2092 if (!stream_info) {
2093 LOGE("Could not allocate stream info");
2094 rc = -ENOMEM;
2095 pthread_mutex_unlock(&mMutex);
2096 return rc;
2097 }
2098 stream_info->stream = newStream;
2099 stream_info->status = VALID;
2100 stream_info->channel = NULL;
2101 mStreamInfo.push_back(stream_info);
2102 }
2103 /* Covers Opaque ZSL and API1 F/W ZSL */
2104 if (IS_USAGE_ZSL(newStream->usage)
2105 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2106 if (zslStream != NULL) {
2107 LOGE("Multiple input/reprocess streams requested!");
2108 pthread_mutex_unlock(&mMutex);
2109 return BAD_VALUE;
2110 }
2111 zslStream = newStream;
2112 }
2113 /* Covers YUV reprocess */
2114 if (inputStream != NULL) {
2115 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2116 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2117 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2118 && inputStream->width == newStream->width
2119 && inputStream->height == newStream->height) {
2120 if (zslStream != NULL) {
2121 /* This scenario indicates that multiple YUV streams with the same size
2122 * as the input stream have been requested. Since the zsl stream handle
2123 * is solely used to override the size of streams which share h/w
2124 * streams, we will just make a guess here as to which of the streams
2125 * is the ZSL stream; this will be refactored once we have generic
2126 * logic for streams sharing encoder output
2127 */
2128 LOGH("Warning, Multiple ip/reprocess streams requested!");
2129 }
2130 zslStream = newStream;
2131 }
2132 }
2133 }
2134
2135 /* If a zsl stream is set, we know that we have configured at least one input or
2136 bidirectional stream */
2137 if (NULL != zslStream) {
2138 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2139 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2140 mInputStreamInfo.format = zslStream->format;
2141 mInputStreamInfo.usage = zslStream->usage;
2142 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2143 mInputStreamInfo.dim.width,
2144 mInputStreamInfo.dim.height,
2145 mInputStreamInfo.format, mInputStreamInfo.usage);
2146 }
2147
2148 cleanAndSortStreamInfo();
2149 if (mMetadataChannel) {
2150 delete mMetadataChannel;
2151 mMetadataChannel = NULL;
2152 }
2153 if (mSupportChannel) {
2154 delete mSupportChannel;
2155 mSupportChannel = NULL;
2156 }
2157
2158 if (mAnalysisChannel) {
2159 delete mAnalysisChannel;
2160 mAnalysisChannel = NULL;
2161 }
2162
2163 if (mDummyBatchChannel) {
2164 delete mDummyBatchChannel;
2165 mDummyBatchChannel = NULL;
2166 }
2167
Emilian Peev7650c122017-01-19 08:24:33 -08002168 if (mDepthChannel) {
2169 mDepthChannel = NULL;
2170 }
2171
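    // Read persist.camera.is_type to see whether EIS 3.0 is selected; this drives
    // the video buffer count and the PPEISCORE feature flag applied further below.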
Thierry Strudel2896d122017-02-23 19:18:03 -08002172 char is_type_value[PROPERTY_VALUE_MAX];
2173 property_get("persist.camera.is_type", is_type_value, "4");
2174 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2175
Thierry Strudel3d639192016-09-09 11:52:26 -07002176 //Create metadata channel and initialize it
2177 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2178 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2179 gCamCapability[mCameraId]->color_arrangement);
2180 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2181 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002182 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 if (mMetadataChannel == NULL) {
2184 LOGE("failed to allocate metadata channel");
2185 rc = -ENOMEM;
2186 pthread_mutex_unlock(&mMutex);
2187 return rc;
2188 }
2189 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2190 if (rc < 0) {
2191 LOGE("metadata channel initialization failed");
2192 delete mMetadataChannel;
2193 mMetadataChannel = NULL;
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Thierry Strudel2896d122017-02-23 19:18:03 -08002198 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002199 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002200 bool onlyRaw = true;
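    // Assume a RAW-only configuration until a non-RAW stream is found in the loop below.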
Thierry Strudel3d639192016-09-09 11:52:26 -07002201 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2202 /* Allocate channel objects for the requested streams */
2203 for (size_t i = 0; i < streamList->num_streams; i++) {
2204 camera3_stream_t *newStream = streamList->streams[i];
2205 uint32_t stream_usage = newStream->usage;
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2207 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2208 struct camera_info *p_info = NULL;
2209 pthread_mutex_lock(&gCamLock);
2210 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2211 pthread_mutex_unlock(&gCamLock);
2212 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2213 || IS_USAGE_ZSL(newStream->usage)) &&
2214 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002215 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002216 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002217 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2218 if (bUseCommonFeatureMask)
2219 zsl_ppmask = commonFeatureMask;
2220 else
2221 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002222 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002223 if (numStreamsOnEncoder > 0)
2224 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225 else
2226 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002228 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002230 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002231 LOGH("Input stream configured, reprocess config");
2232 } else {
2233 //for non zsl streams find out the format
2234 switch (newStream->format) {
2235 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2236 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002237 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002238 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2239 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2240 /* add additional features to pp feature mask */
2241 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2242 mStreamConfigInfo.num_streams);
2243
2244 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2246 CAM_STREAM_TYPE_VIDEO;
2247 if (m_bTnrEnabled && m_bTnrVideo) {
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2249 CAM_QCOM_FEATURE_CPP_TNR;
2250 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2251 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2252 ~CAM_QCOM_FEATURE_CDS;
2253 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002254 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2255 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2256 CAM_QTI_FEATURE_PPEISCORE;
2257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 } else {
2259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2260 CAM_STREAM_TYPE_PREVIEW;
2261 if (m_bTnrEnabled && m_bTnrPreview) {
2262 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2263 CAM_QCOM_FEATURE_CPP_TNR;
2264 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2265 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2266 ~CAM_QCOM_FEATURE_CDS;
2267 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002268 if(!m_bSwTnrPreview) {
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2270 ~CAM_QTI_FEATURE_SW_TNR;
2271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 padding_info.width_padding = mSurfaceStridePadding;
2273 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002274 previewSize.width = (int32_t)newStream->width;
2275 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 }
2277 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2278 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2279 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2280 newStream->height;
2281 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2282 newStream->width;
2283 }
2284 }
2285 break;
2286 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002287 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2289 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2290 if (bUseCommonFeatureMask)
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2292 commonFeatureMask;
2293 else
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2295 CAM_QCOM_FEATURE_NONE;
2296 } else {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2298 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2299 }
2300 break;
2301 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002302 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2304 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2305 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2307 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 /* Remove rotation if it is not supported
2309 for 4K LiveVideo snapshot case (online processing) */
2310 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2311 CAM_QCOM_FEATURE_ROTATION)) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2313 &= ~CAM_QCOM_FEATURE_ROTATION;
2314 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 } else {
2316 if (bUseCommonFeatureMask &&
2317 isOnEncoder(maxViewfinderSize, newStream->width,
2318 newStream->height)) {
2319 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2320 } else {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2322 }
2323 }
2324 if (isZsl) {
2325 if (zslStream) {
2326 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2327 (int32_t)zslStream->width;
2328 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2329 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 } else {
2333 LOGE("Error, No ZSL stream identified");
2334 pthread_mutex_unlock(&mMutex);
2335 return -EINVAL;
2336 }
2337 } else if (m_bIs4KVideo) {
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2339 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2340 } else if (bYuv888OverrideJpeg) {
2341 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2342 (int32_t)largeYuv888Size.width;
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2344 (int32_t)largeYuv888Size.height;
2345 }
2346 break;
2347 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2348 case HAL_PIXEL_FORMAT_RAW16:
2349 case HAL_PIXEL_FORMAT_RAW10:
2350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2351 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2352 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002353 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2354 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2355 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2356 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2357 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2358 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2359 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2360 gCamCapability[mCameraId]->dt[mPDIndex];
2361 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2362 gCamCapability[mCameraId]->vc[mPDIndex];
2363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 break;
2365 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2369 break;
2370 }
2371 }
2372
2373 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2374 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2375 gCamCapability[mCameraId]->color_arrangement);
2376
2377 if (newStream->priv == NULL) {
2378 //New stream, construct channel
2379 switch (newStream->stream_type) {
2380 case CAMERA3_STREAM_INPUT:
2381 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2382 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; // WR for in-place algorithms
2383 break;
2384 case CAMERA3_STREAM_BIDIRECTIONAL:
2385 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2386 GRALLOC_USAGE_HW_CAMERA_WRITE;
2387 break;
2388 case CAMERA3_STREAM_OUTPUT:
2389 /* For video encoding streams, set the read/write rarely
2390 * flags so that the buffers may be allocated un-cached */
2391 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2392 newStream->usage |=
2393 (GRALLOC_USAGE_SW_READ_RARELY |
2394 GRALLOC_USAGE_SW_WRITE_RARELY |
2395 GRALLOC_USAGE_HW_CAMERA_WRITE);
2396 else if (IS_USAGE_ZSL(newStream->usage))
2397 {
2398 LOGD("ZSL usage flag skipping");
2399 }
2400 else if (newStream == zslStream
2401 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2402 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2403 } else
2404 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2405 break;
2406 default:
2407 LOGE("Invalid stream_type %d", newStream->stream_type);
2408 break;
2409 }
2410
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002411 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002412 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2413 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2414 QCamera3ProcessingChannel *channel = NULL;
2415 switch (newStream->format) {
2416 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2417 if ((newStream->usage &
2418 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2419 (streamList->operation_mode ==
2420 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2421 ) {
2422 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2423 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002424 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002425 this,
2426 newStream,
2427 (cam_stream_type_t)
2428 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2430 mMetadataChannel,
2431 0); //heap buffers are not required for HFR video channel
2432 if (channel == NULL) {
2433 LOGE("allocation of channel failed");
2434 pthread_mutex_unlock(&mMutex);
2435 return -ENOMEM;
2436 }
2437 // channel->getNumBuffers() will return 0 here, so use
2438 // MAX_INFLIGHT_HFR_REQUESTS
2439 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2440 newStream->priv = channel;
2441 LOGI("num video buffers in HFR mode: %d",
2442 MAX_INFLIGHT_HFR_REQUESTS);
2443 } else {
2444 /* Copy the stream contents in the HFR preview-only case to create
2445 * a dummy batch channel so that sensor streaming is in
2446 * HFR mode */
2447 if (!m_bIsVideo && (streamList->operation_mode ==
2448 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2449 mDummyBatchStream = *newStream;
2450 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002451 int bufferCount = MAX_INFLIGHT_REQUESTS;
2452 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2453 CAM_STREAM_TYPE_VIDEO) {
2454 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2455 bufferCount = MAX_VIDEO_BUFFERS;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2458 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002459 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002460 this,
2461 newStream,
2462 (cam_stream_type_t)
2463 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2465 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002467 if (channel == NULL) {
2468 LOGE("allocation of channel failed");
2469 pthread_mutex_unlock(&mMutex);
2470 return -ENOMEM;
2471 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 /* disable UBWC for preview, though supported,
2473 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002474 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002475 (previewSize.width == (int32_t)videoWidth)&&
2476 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002477 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002479 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 newStream->max_buffers = channel->getNumBuffers();
2481 newStream->priv = channel;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2485 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2486 mChannelHandle,
2487 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel);
2495 if (channel == NULL) {
2496 LOGE("allocation of YUV channel failed");
2497 pthread_mutex_unlock(&mMutex);
2498 return -ENOMEM;
2499 }
2500 newStream->max_buffers = channel->getNumBuffers();
2501 newStream->priv = channel;
2502 break;
2503 }
2504 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2505 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002506 case HAL_PIXEL_FORMAT_RAW10: {
2507 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2508 (HAL_DATASPACE_DEPTH != newStream->data_space))
2509 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mRawChannel = new QCamera3RawChannel(
2511 mCameraHandle->camera_handle, mChannelHandle,
2512 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002513 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002514 this, newStream,
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002516 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002517 if (mRawChannel == NULL) {
2518 LOGE("allocation of raw channel failed");
2519 pthread_mutex_unlock(&mMutex);
2520 return -ENOMEM;
2521 }
2522 newStream->max_buffers = mRawChannel->getNumBuffers();
2523 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2524 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002525 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002527 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2528 mDepthChannel = new QCamera3DepthChannel(
2529 mCameraHandle->camera_handle, mChannelHandle,
2530 mCameraHandle->ops, NULL, NULL, &padding_info,
2531 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2532 mMetadataChannel);
2533 if (NULL == mDepthChannel) {
2534 LOGE("Allocation of depth channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return NO_MEMORY;
2537 }
2538 newStream->priv = mDepthChannel;
2539 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2540 } else {
2541 // Max live snapshot inflight buffer is 1. This is to mitigate
2542 // frame drop issues for video snapshot. The more buffers being
2543 // allocated, the more frame drops there are.
2544 mPictureChannel = new QCamera3PicChannel(
2545 mCameraHandle->camera_handle, mChannelHandle,
2546 mCameraHandle->ops, captureResultCb,
2547 setBufferErrorStatus, &padding_info, this, newStream,
2548 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2549 m_bIs4KVideo, isZsl, mMetadataChannel,
2550 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2551 if (mPictureChannel == NULL) {
2552 LOGE("allocation of channel failed");
2553 pthread_mutex_unlock(&mMutex);
2554 return -ENOMEM;
2555 }
2556 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2557 newStream->max_buffers = mPictureChannel->getNumBuffers();
2558 mPictureChannel->overrideYuvSize(
2559 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2560 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002562 break;
2563
2564 default:
2565 LOGE("not a supported format 0x%x", newStream->format);
2566 break;
2567 }
2568 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2569 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2570 } else {
2571 LOGE("Error, Unknown stream type");
2572 pthread_mutex_unlock(&mMutex);
2573 return -EINVAL;
2574 }
2575
2576 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002577 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2578 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002579 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002580 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2582 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2583 }
2584 }
2585
2586 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2587 it != mStreamInfo.end(); it++) {
2588 if ((*it)->stream == newStream) {
2589 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2590 break;
2591 }
2592 }
2593 } else {
2594 // Channel already exists for this stream
2595 // Do nothing for now
2596 }
2597 padding_info = gCamCapability[mCameraId]->padding_info;
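    // Restore the default padding for the next stream; the preview path above may
    // have overridden the width/height padding.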
2598
Emilian Peev7650c122017-01-19 08:24:33 -08002599 /* Do not add entries for input & depth streams in the metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 * since there is no real stream associated with them
2601 */
Emilian Peev7650c122017-01-19 08:24:33 -08002602 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002603 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2604 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 }
2608
Thierry Strudel2896d122017-02-23 19:18:03 -08002609 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2610 onlyRaw = false;
2611 }
2612
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002613 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002615 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002616 cam_analysis_info_t analysisInfo;
2617 int32_t ret = NO_ERROR;
2618 ret = mCommon.getAnalysisInfo(
2619 FALSE,
2620 analysisFeatureMask,
2621 &analysisInfo);
2622 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002623 cam_color_filter_arrangement_t analysis_color_arrangement =
2624 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2625 CAM_FILTER_ARRANGEMENT_Y :
2626 gCamCapability[mCameraId]->color_arrangement);
2627 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2628 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002629 cam_dimension_t analysisDim;
2630 analysisDim = mCommon.getMatchingDimension(previewSize,
2631 analysisInfo.analysis_recommended_res);
2632
2633 mAnalysisChannel = new QCamera3SupportChannel(
2634 mCameraHandle->camera_handle,
2635 mChannelHandle,
2636 mCameraHandle->ops,
2637 &analysisInfo.analysis_padding_info,
2638 analysisFeatureMask,
2639 CAM_STREAM_TYPE_ANALYSIS,
2640 &analysisDim,
2641 (analysisInfo.analysis_format
2642 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2643 : CAM_FORMAT_YUV_420_NV21),
2644 analysisInfo.hw_analysis_supported,
2645 gCamCapability[mCameraId]->color_arrangement,
2646 this,
2647 0); // force buffer count to 0
2648 } else {
2649 LOGW("getAnalysisInfo failed, ret = %d", ret);
2650 }
2651 if (!mAnalysisChannel) {
2652 LOGW("Analysis channel cannot be created");
2653 }
2654 }
2655
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 //RAW DUMP channel
2657 if (mEnableRawDump && isRawStreamRequested == false){
2658 cam_dimension_t rawDumpSize;
2659 rawDumpSize = getMaxRawSize(mCameraId);
2660 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2661 setPAAFSupport(rawDumpFeatureMask,
2662 CAM_STREAM_TYPE_RAW,
2663 gCamCapability[mCameraId]->color_arrangement);
2664 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2665 mChannelHandle,
2666 mCameraHandle->ops,
2667 rawDumpSize,
2668 &padding_info,
2669 this, rawDumpFeatureMask);
2670 if (!mRawDumpChannel) {
2671 LOGE("Raw Dump channel cannot be created");
2672 pthread_mutex_unlock(&mMutex);
2673 return -ENOMEM;
2674 }
2675 }
2676
Chien-Yu Chenee335912017-02-09 17:53:20 -08002677 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002678 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002679 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002680 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2681 "HDR+ RAW source channel is not created.",
2682 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002683 } else {
2684 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2685 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2686 setPAAFSupport(hdrPlusRawFeatureMask,
2687 CAM_STREAM_TYPE_RAW,
2688 gCamCapability[mCameraId]->color_arrangement);
2689 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2690 mChannelHandle,
2691 mCameraHandle->ops,
2692 rawSize,
2693 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002694 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002695 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002696 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002697 if (!mHdrPlusRawSrcChannel) {
2698 LOGE("HDR+ Raw Source channel cannot be created");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 }
2703 }
2704
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 if (mAnalysisChannel) {
2706 cam_analysis_info_t analysisInfo;
2707 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2708 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2709 CAM_STREAM_TYPE_ANALYSIS;
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2711 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002712 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2714 &analysisInfo);
2715 if (rc != NO_ERROR) {
2716 LOGE("getAnalysisInfo failed, ret = %d", rc);
2717 pthread_mutex_unlock(&mMutex);
2718 return rc;
2719 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002720 cam_color_filter_arrangement_t analysis_color_arrangement =
2721 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2722 CAM_FILTER_ARRANGEMENT_Y :
2723 gCamCapability[mCameraId]->color_arrangement);
2724 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2725 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2726 analysis_color_arrangement);
2727
Thierry Strudel3d639192016-09-09 11:52:26 -07002728 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002729 mCommon.getMatchingDimension(previewSize,
2730 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 mStreamConfigInfo.num_streams++;
2732 }
2733
Thierry Strudel2896d122017-02-23 19:18:03 -08002734 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 cam_analysis_info_t supportInfo;
2736 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2737 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2738 setPAAFSupport(callbackFeatureMask,
2739 CAM_STREAM_TYPE_CALLBACK,
2740 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002741 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002742 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002743 if (ret != NO_ERROR) {
2744 /* Ignore the error for Mono camera
2745 * because the PAAF bit mask is only set
2746 * for CAM_STREAM_TYPE_ANALYSIS stream type
2747 */
2748 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2749 LOGW("getAnalysisInfo failed, ret = %d", ret);
2750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 }
2752 mSupportChannel = new QCamera3SupportChannel(
2753 mCameraHandle->camera_handle,
2754 mChannelHandle,
2755 mCameraHandle->ops,
2756 &gCamCapability[mCameraId]->padding_info,
2757 callbackFeatureMask,
2758 CAM_STREAM_TYPE_CALLBACK,
2759 &QCamera3SupportChannel::kDim,
2760 CAM_FORMAT_YUV_420_NV21,
2761 supportInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002763 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 if (!mSupportChannel) {
2765 LOGE("dummy channel cannot be created");
2766 pthread_mutex_unlock(&mMutex);
2767 return -ENOMEM;
2768 }
2769 }
2770
2771 if (mSupportChannel) {
2772 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2773 QCamera3SupportChannel::kDim;
2774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2775 CAM_STREAM_TYPE_CALLBACK;
2776 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2777 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2778 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2779 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mStreamConfigInfo.num_streams++;
2782 }
2783
2784 if (mRawDumpChannel) {
2785 cam_dimension_t rawSize;
2786 rawSize = getMaxRawSize(mCameraId);
2787 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2788 rawSize;
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2790 CAM_STREAM_TYPE_RAW;
2791 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2792 CAM_QCOM_FEATURE_NONE;
2793 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2794 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2795 gCamCapability[mCameraId]->color_arrangement);
2796 mStreamConfigInfo.num_streams++;
2797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002798
2799 if (mHdrPlusRawSrcChannel) {
2800 cam_dimension_t rawSize;
2801 rawSize = getMaxRawSize(mCameraId);
2802 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2803 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2804 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2805 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2806 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2807 gCamCapability[mCameraId]->color_arrangement);
2808 mStreamConfigInfo.num_streams++;
2809 }
2810
Thierry Strudel3d639192016-09-09 11:52:26 -07002811 /* In HFR mode, if no video stream is added, create a dummy channel so that
2812 * the ISP can create a batch mode even for the preview-only case. This channel is
2813 * never 'start'ed (no stream-on), it is only 'initialized' */
2814 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2815 !m_bIsVideo) {
2816 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2817 setPAAFSupport(dummyFeatureMask,
2818 CAM_STREAM_TYPE_VIDEO,
2819 gCamCapability[mCameraId]->color_arrangement);
2820 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2821 mChannelHandle,
2822 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002823 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 this,
2825 &mDummyBatchStream,
2826 CAM_STREAM_TYPE_VIDEO,
2827 dummyFeatureMask,
2828 mMetadataChannel);
2829 if (NULL == mDummyBatchChannel) {
2830 LOGE("creation of mDummyBatchChannel failed. "
2831 "Preview will use non-HFR sensor mode");
2832 }
2833 }
2834 if (mDummyBatchChannel) {
2835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2836 mDummyBatchStream.width;
2837 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2838 mDummyBatchStream.height;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2840 CAM_STREAM_TYPE_VIDEO;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2842 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2843 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2844 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2845 gCamCapability[mCameraId]->color_arrangement);
2846 mStreamConfigInfo.num_streams++;
2847 }
2848
2849 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2850 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002851 m_bIs4KVideo ? 0 :
2852 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002853
2854 /* Initialize mPendingRequestsList and mPendingBuffersMap */
2855 for (pendingRequestIterator i = mPendingRequestsList.begin();
2856 i != mPendingRequestsList.end();) {
2857 i = erasePendingRequest(i);
2858 }
2859 mPendingFrameDropList.clear();
2860 // Initialize/Reset the pending buffers list
2861 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2862 req.mPendingBufferList.clear();
2863 }
2864 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 mCurJpegMeta.clear();
2867 //Get min frame duration for this stream configuration
2868 deriveMinFrameDuration();
2869
Chien-Yu Chenee335912017-02-09 17:53:20 -08002870 mFirstPreviewIntentSeen = false;
2871
2872 // Disable HDR+ if it's enabled
2873 disableHdrPlusModeLocked();
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 // Update state
2876 mState = CONFIGURED;
2877
2878 pthread_mutex_unlock(&mMutex);
2879
2880 return rc;
2881}
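
/* Illustrative sketch (assumption; not part of this HAL): the nested ternary
 * used above for buffer_info.max_buffers, rewritten as an if/else helper with
 * hypothetical constants so the precedence is easier to read:
 *
 *   static const uint32_t kMaxVideoBuffers = 20;     // stand-in for MAX_VIDEO_BUFFERS
 *   static const uint32_t kMaxInflightRequests = 6;  // stand-in for MAX_INFLIGHT_REQUESTS
 *
 *   static uint32_t pickMaxBuffers(bool is4kVideo, bool eis3Enabled) {
 *       if (is4kVideo)   return 0;                   // same 0 as used above for 4K video
 *       if (eis3Enabled) return kMaxVideoBuffers;    // EIS 3.0 needs a deeper buffer pool
 *       return kMaxInflightRequests;
 *   }
 */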
2882
2883/*===========================================================================
2884 * FUNCTION : validateCaptureRequest
2885 *
2886 * DESCRIPTION: validate a capture request from camera service
2887 *
2888 * PARAMETERS :
2889 * @request : request from framework to process
2890 *
2891 * RETURN :
2892 *
2893 *==========================================================================*/
2894int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002895 camera3_capture_request_t *request,
2896 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002897{
2898 ssize_t idx = 0;
2899 const camera3_stream_buffer_t *b;
2900 CameraMetadata meta;
2901
2902 /* Sanity check the request */
2903 if (request == NULL) {
2904 LOGE("NULL capture request");
2905 return BAD_VALUE;
2906 }
2907
2908 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2909 /*settings cannot be null for the first request*/
2910 return BAD_VALUE;
2911 }
2912
2913 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002914 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2915 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002916 LOGE("Request %d: No output buffers provided!",
2917 frameNumber);
2918 return BAD_VALUE;
2919 }
2920 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2921 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2922 request->num_output_buffers, MAX_NUM_STREAMS);
2923 return BAD_VALUE;
2924 }
2925 if (request->input_buffer != NULL) {
2926 b = request->input_buffer;
2927 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2928 LOGE("Request %d: Buffer %ld: Status not OK!",
2929 frameNumber, (long)idx);
2930 return BAD_VALUE;
2931 }
2932 if (b->release_fence != -1) {
2933 LOGE("Request %d: Buffer %ld: Has a release fence!",
2934 frameNumber, (long)idx);
2935 return BAD_VALUE;
2936 }
2937 if (b->buffer == NULL) {
2938 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2939 frameNumber, (long)idx);
2940 return BAD_VALUE;
2941 }
2942 }
2943
2944 // Validate all buffers
2945 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002946 if (b == NULL) {
2947 return BAD_VALUE;
2948 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002949 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002950 QCamera3ProcessingChannel *channel =
2951 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2952 if (channel == NULL) {
2953 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2954 frameNumber, (long)idx);
2955 return BAD_VALUE;
2956 }
2957 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2958 LOGE("Request %d: Buffer %ld: Status not OK!",
2959 frameNumber, (long)idx);
2960 return BAD_VALUE;
2961 }
2962 if (b->release_fence != -1) {
2963 LOGE("Request %d: Buffer %ld: Has a release fence!",
2964 frameNumber, (long)idx);
2965 return BAD_VALUE;
2966 }
2967 if (b->buffer == NULL) {
2968 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2969 frameNumber, (long)idx);
2970 return BAD_VALUE;
2971 }
2972 if (*(b->buffer) == NULL) {
2973 LOGE("Request %d: Buffer %ld: NULL private handle!",
2974 frameNumber, (long)idx);
2975 return BAD_VALUE;
2976 }
2977 idx++;
2978 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002980 return NO_ERROR;
2981}
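
/* Illustrative sketch (assumes the stock <hardware/camera3.h> types; the
 * helper name is hypothetical): the per-buffer checks applied by
 * validateCaptureRequest() above, condensed into one standalone predicate:
 *
 *   #include <hardware/camera3.h>
 *
 *   static bool isOutputBufferSane(const camera3_stream_buffer_t &b) {
 *       if (b.stream == nullptr || b.stream->priv == nullptr) return false; // unconfigured stream
 *       if (b.status != CAMERA3_BUFFER_STATUS_OK)             return false; // status must be OK
 *       if (b.release_fence != -1)                            return false; // no release fence allowed
 *       if (b.buffer == nullptr || *b.buffer == nullptr)      return false; // need a valid handle
 *       return true;
 *   }
 */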
2982
2983/*===========================================================================
2984 * FUNCTION : deriveMinFrameDuration
2985 *
2986 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2987 * on currently configured streams.
2988 *
2989 * PARAMETERS : NONE
2990 *
2991 * RETURN : NONE
2992 *
2993 *==========================================================================*/
2994void QCamera3HardwareInterface::deriveMinFrameDuration()
2995{
2996 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2997
2998 maxJpegDim = 0;
2999 maxProcessedDim = 0;
3000 maxRawDim = 0;
3001
3002 // Figure out maximum jpeg, processed, and raw dimensions
3003 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3004 it != mStreamInfo.end(); it++) {
3005
3006 // Input stream doesn't have valid stream_type
3007 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3008 continue;
3009
3010 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3011 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3012 if (dimension > maxJpegDim)
3013 maxJpegDim = dimension;
3014 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3015 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3016 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3017 if (dimension > maxRawDim)
3018 maxRawDim = dimension;
3019 } else {
3020 if (dimension > maxProcessedDim)
3021 maxProcessedDim = dimension;
3022 }
3023 }
3024
3025 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3026 MAX_SIZES_CNT);
3027
3028 //Assume all jpeg dimensions are in processed dimensions.
3029 if (maxJpegDim > maxProcessedDim)
3030 maxProcessedDim = maxJpegDim;
3031 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3032 if (maxProcessedDim > maxRawDim) {
3033 maxRawDim = INT32_MAX;
3034
3035 for (size_t i = 0; i < count; i++) {
3036 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3037 gCamCapability[mCameraId]->raw_dim[i].height;
3038 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3039 maxRawDim = dimension;
3040 }
3041 }
3042
3043 //Find minimum durations for processed, jpeg, and raw
3044 for (size_t i = 0; i < count; i++) {
3045 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3046 gCamCapability[mCameraId]->raw_dim[i].height) {
3047 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3048 break;
3049 }
3050 }
3051 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3052 for (size_t i = 0; i < count; i++) {
3053 if (maxProcessedDim ==
3054 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3055 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3056 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3057 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3058 break;
3059 }
3060 }
3061}
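
/* Illustrative sketch (hypothetical table and names; not from this HAL): the
 * raw-duration selection above boils down to "find the smallest table entry
 * whose area is at least the largest requested area and take its minimum
 * duration". A standalone version of that lookup:
 *
 *   #include <cstdint>
 *   #include <vector>
 *
 *   struct SizeDuration { int32_t width; int32_t height; int64_t minDurationNs; };
 *
 *   static int64_t lookupMinDuration(const std::vector<SizeDuration> &table,
 *                                    int32_t maxRequestedArea) {
 *       int64_t best = 0;
 *       int32_t bestArea = INT32_MAX;
 *       for (const auto &e : table) {
 *           int32_t area = e.width * e.height;
 *           if (area >= maxRequestedArea && area < bestArea) {
 *               bestArea = area;          // smallest qualifying size so far
 *               best = e.minDurationNs;
 *           }
 *       }
 *       return best;                      // 0 if nothing qualified
 *   }
 */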
3062
3063/*===========================================================================
3064 * FUNCTION : getMinFrameDuration
3065 *
3066 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3067 * derived for the configured streams and the current request configuration.
3068 *
3069 * PARAMETERS : @request: request sent by the frameworks
3070 *
3071 * RETURN : min frame duration for a particular request
3072 *
3073 *==========================================================================*/
3074int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3075{
3076 bool hasJpegStream = false;
3077 bool hasRawStream = false;
3078 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3079 const camera3_stream_t *stream = request->output_buffers[i].stream;
3080 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3081 hasJpegStream = true;
3082 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3083 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3084 stream->format == HAL_PIXEL_FORMAT_RAW16)
3085 hasRawStream = true;
3086 }
3087
3088 if (!hasJpegStream)
3089 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3090 else
3091 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3092}
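
/* Illustrative sketch (hypothetical helper; not part of this HAL): the return
 * expression above is just a max over the per-class minimum durations derived
 * at configure time, adding the JPEG minimum only when the request contains a
 * BLOB stream:
 *
 *   #include <algorithm>
 *   #include <cstdint>
 *
 *   static int64_t minFrameDurationFor(bool hasJpeg, int64_t minProcessedNs,
 *                                      int64_t minJpegNs, int64_t minRawNs) {
 *       int64_t d = std::max(minRawNs, minProcessedNs);
 *       return hasJpeg ? std::max(d, minJpegNs) : d;
 *   }
 */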
3093
3094/*===========================================================================
3095 * FUNCTION : handleBuffersDuringFlushLock
3096 *
3097 * DESCRIPTION: Account for buffers returned from back-end during flush
3098 * This function is executed while mMutex is held by the caller.
3099 *
3100 * PARAMETERS :
3101 * @buffer: image buffer for the callback
3102 *
3103 * RETURN :
3104 *==========================================================================*/
3105void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3106{
3107 bool buffer_found = false;
3108 for (List<PendingBuffersInRequest>::iterator req =
3109 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3110 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3111 for (List<PendingBufferInfo>::iterator i =
3112 req->mPendingBufferList.begin();
3113 i != req->mPendingBufferList.end(); i++) {
3114 if (i->buffer == buffer->buffer) {
3115 mPendingBuffersMap.numPendingBufsAtFlush--;
3116 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3117 buffer->buffer, req->frame_number,
3118 mPendingBuffersMap.numPendingBufsAtFlush);
3119 buffer_found = true;
3120 break;
3121 }
3122 }
3123 if (buffer_found) {
3124 break;
3125 }
3126 }
3127 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3128 //signal the flush()
3129 LOGD("All buffers returned to HAL. Continue flush");
3130 pthread_cond_signal(&mBuffersCond);
3131 }
3132}
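
/* Illustrative sketch (generic pattern, hypothetical names): the flush
 * accounting above is a counted-down condition variable; the flush() thread
 * waits until every outstanding buffer has been returned by the backend.
 *
 *   #include <pthread.h>
 *
 *   struct FlushTracker {
 *       pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
 *       pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;
 *       int outstanding = 0;
 *   };
 *
 *   // Called once per buffer returned during flush.
 *   static void onBufferReturned(FlushTracker &t) {
 *       pthread_mutex_lock(&t.lock);
 *       if (--t.outstanding == 0) {
 *           pthread_cond_signal(&t.cond);   // wake the thread blocked in flush()
 *       }
 *       pthread_mutex_unlock(&t.lock);
 *   }
 *
 *   // flush() side: block until the count reaches zero.
 *   static void waitForBuffers(FlushTracker &t) {
 *       pthread_mutex_lock(&t.lock);
 *       while (t.outstanding > 0) {
 *           pthread_cond_wait(&t.cond, &t.lock);
 *       }
 *       pthread_mutex_unlock(&t.lock);
 *   }
 */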
3133
Thierry Strudel3d639192016-09-09 11:52:26 -07003134/*===========================================================================
3135 * FUNCTION : handleBatchMetadata
3136 *
3137 * DESCRIPTION: Handles metadata buffer callback in batch mode
3138 *
3139 * PARAMETERS : @metadata_buf: metadata buffer
3140 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3141 * the meta buf in this method
3142 *
3143 * RETURN :
3144 *
3145 *==========================================================================*/
3146void QCamera3HardwareInterface::handleBatchMetadata(
3147 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3148{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003149 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003150
3151 if (NULL == metadata_buf) {
3152 LOGE("metadata_buf is NULL");
3153 return;
3154 }
3155 /* In batch mode, the metadata will contain the frame number and timestamp of
3156 * the last frame in the batch. E.g.: a batch containing buffers from requests
3157 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3158 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3159 * multiple process_capture_results */
3160 metadata_buffer_t *metadata =
3161 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3162 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3163 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3164 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3165 uint32_t frame_number = 0, urgent_frame_number = 0;
3166 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3167 bool invalid_metadata = false;
3168 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3169 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003170 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003171
3172 int32_t *p_frame_number_valid =
3173 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3174 uint32_t *p_frame_number =
3175 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3176 int64_t *p_capture_time =
3177 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3178 int32_t *p_urgent_frame_number_valid =
3179 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3180 uint32_t *p_urgent_frame_number =
3181 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3182
3183 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3184 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3185 (NULL == p_urgent_frame_number)) {
3186 LOGE("Invalid metadata");
3187 invalid_metadata = true;
3188 } else {
3189 frame_number_valid = *p_frame_number_valid;
3190 last_frame_number = *p_frame_number;
3191 last_frame_capture_time = *p_capture_time;
3192 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3193 last_urgent_frame_number = *p_urgent_frame_number;
3194 }
3195
3196 /* In batch mode, when no video buffers are requested, set_parms are sent
3197 * for every capture_request. The difference between consecutive urgent
3198 * frame numbers and frame numbers should be used to interpolate the
3199 * corresponding frame numbers and time stamps */
3200 pthread_mutex_lock(&mMutex);
3201 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003202 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3203 if(idx < 0) {
3204 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3205 last_urgent_frame_number);
3206 mState = ERROR;
3207 pthread_mutex_unlock(&mMutex);
3208 return;
3209 }
3210 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003211 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3212 first_urgent_frame_number;
3213
3214 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3215 urgent_frame_number_valid,
3216 first_urgent_frame_number, last_urgent_frame_number);
3217 }
3218
3219 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003220 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3221 if(idx < 0) {
3222 LOGE("Invalid frame number received: %d. Irrecoverable error",
3223 last_frame_number);
3224 mState = ERROR;
3225 pthread_mutex_unlock(&mMutex);
3226 return;
3227 }
3228 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003229 frameNumDiff = last_frame_number + 1 -
3230 first_frame_number;
3231 mPendingBatchMap.removeItem(last_frame_number);
3232
3233 LOGD("frm: valid: %d frm_num: %d - %d",
3234 frame_number_valid,
3235 first_frame_number, last_frame_number);
3236
3237 }
3238 pthread_mutex_unlock(&mMutex);
3239
3240 if (urgent_frame_number_valid || frame_number_valid) {
3241 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3242 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3243 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3244 urgentFrameNumDiff, last_urgent_frame_number);
3245 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3246 LOGE("frameNumDiff: %d frameNum: %d",
3247 frameNumDiff, last_frame_number);
3248 }
3249
3250 for (size_t i = 0; i < loopCount; i++) {
3251 /* handleMetadataWithLock is called even for invalid_metadata for
3252 * pipeline depth calculation */
3253 if (!invalid_metadata) {
3254 /* Infer frame number. Batch metadata contains frame number of the
3255 * last frame */
3256 if (urgent_frame_number_valid) {
3257 if (i < urgentFrameNumDiff) {
3258 urgent_frame_number =
3259 first_urgent_frame_number + i;
3260 LOGD("inferred urgent frame_number: %d",
3261 urgent_frame_number);
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3263 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3264 } else {
3265 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3268 }
3269 }
3270
3271 /* Infer frame number. Batch metadata contains frame number of the
3272 * last frame */
3273 if (frame_number_valid) {
3274 if (i < frameNumDiff) {
3275 frame_number = first_frame_number + i;
3276 LOGD("inferred frame_number: %d", frame_number);
3277 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3278 CAM_INTF_META_FRAME_NUMBER, frame_number);
3279 } else {
3280 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3283 }
3284 }
3285
3286 if (last_frame_capture_time) {
3287 //Infer timestamp
3288 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003289 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003291 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003292 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3293 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3294 LOGD("batch capture_time: %lld, capture_time: %lld",
3295 last_frame_capture_time, capture_time);
3296 }
3297 }
3298 pthread_mutex_lock(&mMutex);
3299 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003300 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003301 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3302 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003303 &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 pthread_mutex_unlock(&mMutex);
3305 }
3306
3307 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003308 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003309 mMetadataChannel->bufDone(metadata_buf);
3310 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003311 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003312 }
3313}
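
/* Illustrative sketch (hypothetical values; standalone program): batch
 * metadata only carries the last frame number and timestamp, so the earlier
 * frames in the batch are reconstructed by stepping backwards at the HFR
 * frame interval, the same interpolation done in handleBatchMetadata() above.
 *
 *   #include <cstdint>
 *   #include <cstdio>
 *
 *   int main() {
 *       const int64_t  kNsPerSec = 1000000000LL;
 *       const double   fps = 120.0;            // HFR rate (made up)
 *       const uint32_t firstFrameNumber = 5;   // batch covers requests 5..8
 *       const uint32_t lastFrameNumber = 8;
 *       const int64_t  lastCaptureTimeNs = 2000000000LL;
 *
 *       const uint32_t count = lastFrameNumber + 1 - firstFrameNumber;
 *       const int64_t  firstCaptureTimeNs =
 *               lastCaptureTimeNs - (int64_t)(((count - 1) * kNsPerSec) / fps);
 *
 *       for (uint32_t i = 0; i < count; i++) {
 *           const uint32_t frameNumber = firstFrameNumber + i;
 *           const int64_t  captureTimeNs =
 *                   firstCaptureTimeNs + (int64_t)(i * kNsPerSec / fps);
 *           printf("frame %u -> %lld ns\n", frameNumber, (long long)captureTimeNs);
 *       }
 *       return 0;
 *   }
 */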
3314
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003315void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3316 camera3_error_msg_code_t errorCode)
3317{
3318 camera3_notify_msg_t notify_msg;
3319 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3320 notify_msg.type = CAMERA3_MSG_ERROR;
3321 notify_msg.message.error.error_code = errorCode;
3322 notify_msg.message.error.error_stream = NULL;
3323 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003324 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003325
3326 return;
3327}
Thierry Strudel3d639192016-09-09 11:52:26 -07003328/*===========================================================================
3329 * FUNCTION : handleMetadataWithLock
3330 *
3331 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3332 *
3333 * PARAMETERS : @metadata_buf: metadata buffer
3334 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3335 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003336 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3337 * last urgent metadata in a batch. Always true for non-batch mode
3338 * @lastMetadataInBatch: Boolean to indicate whether this is the
3339 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3341 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003342 *
3343 * RETURN :
3344 *
3345 *==========================================================================*/
3346void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003347 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003348 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3349 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3353 //during flush do not send metadata from this thread
3354 LOGD("not sending metadata during flush or when mState is error");
3355 if (free_and_bufdone_meta_buf) {
3356 mMetadataChannel->bufDone(metadata_buf);
3357 free(metadata_buf);
3358 }
3359 return;
3360 }
3361
3362 //not in flush
3363 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3364 int32_t frame_number_valid, urgent_frame_number_valid;
3365 uint32_t frame_number, urgent_frame_number;
3366 int64_t capture_time;
3367 nsecs_t currentSysTime;
3368
3369 int32_t *p_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3372 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3373 int32_t *p_urgent_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_urgent_frame_number =
3376 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3377 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3378 metadata) {
3379 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3380 *p_frame_number_valid, *p_frame_number);
3381 }
3382
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003383 camera_metadata_t *resultMetadata = nullptr;
3384
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3386 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3387 LOGE("Invalid metadata");
3388 if (free_and_bufdone_meta_buf) {
3389 mMetadataChannel->bufDone(metadata_buf);
3390 free(metadata_buf);
3391 }
3392 goto done_metadata;
3393 }
3394 frame_number_valid = *p_frame_number_valid;
3395 frame_number = *p_frame_number;
3396 capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 urgent_frame_number = *p_urgent_frame_number;
3399 currentSysTime = systemTime(CLOCK_MONOTONIC);
3400
3401 // Detect if buffers from any requests are overdue
3402 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003403 int64_t timeout;
3404 {
3405 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3406 // If there is a pending HDR+ request, the following requests may be blocked until the
3407 // HDR+ request is done. So allow a longer timeout.
3408 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3409 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3410 }
3411
3412 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003414 assert(missed.stream->priv);
3415 if (missed.stream->priv) {
3416 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3417 assert(ch->mStreams[0]);
3418 if (ch->mStreams[0]) {
3419 LOGE("Cancel missing frame = %d, buffer = %p,"
3420 "stream type = %d, stream format = %d",
3421 req.frame_number, missed.buffer,
3422 ch->mStreams[0]->getMyType(), missed.stream->format);
3423 ch->timeoutFrame(req.frame_number);
3424 }
3425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 }
3427 }
3428 }
3429 //Partial result on process_capture_result for timestamp
3430 if (urgent_frame_number_valid) {
3431 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3432 urgent_frame_number, capture_time);
3433
3434 //Received an urgent frame number, handle it
3435 //using partial results
3436 for (pendingRequestIterator i =
3437 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3438 LOGD("Iterator Frame = %d urgent frame = %d",
3439 i->frame_number, urgent_frame_number);
3440
3441 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3442 (i->partial_result_cnt == 0)) {
3443 LOGE("Error: HAL missed urgent metadata for frame number %d",
3444 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003445 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003446 }
3447
3448 if (i->frame_number == urgent_frame_number &&
3449 i->bUrgentReceived == 0) {
3450
3451 camera3_capture_result_t result;
3452 memset(&result, 0, sizeof(camera3_capture_result_t));
3453
3454 i->partial_result_cnt++;
3455 i->bUrgentReceived = 1;
3456 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003457 result.result = translateCbUrgentMetadataToResultMetadata(
3458 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 // Populate metadata result
3460 result.frame_number = urgent_frame_number;
3461 result.num_output_buffers = 0;
3462 result.output_buffers = NULL;
3463 result.partial_result = i->partial_result_cnt;
3464
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003465 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003466 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003467 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003468 result.partial_result == PARTIAL_RESULT_COUNT);
3469 }
3470
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003471 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 LOGD("urgent frame_number = %u, capture_time = %lld",
3473 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003474 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3475 // Instant AEC settled for this frame.
3476 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3477 mInstantAECSettledFrameNumber = urgent_frame_number;
3478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 free_camera_metadata((camera_metadata_t *)result.result);
3480 break;
3481 }
3482 }
3483 }
3484
3485 if (!frame_number_valid) {
3486 LOGD("Not a valid normal frame number, used as SOF only");
3487 if (free_and_bufdone_meta_buf) {
3488 mMetadataChannel->bufDone(metadata_buf);
3489 free(metadata_buf);
3490 }
3491 goto done_metadata;
3492 }
3493 LOGH("valid frame_number = %u, capture_time = %lld",
3494 frame_number, capture_time);
3495
Emilian Peev7650c122017-01-19 08:24:33 -08003496 if (metadata->is_depth_data_valid) {
3497 handleDepthDataLocked(metadata->depth_data, frame_number);
3498 }
3499
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003500 // Check whether any stream buffer corresponding to this frame is dropped or not
3501 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3502 // OR check if instant AEC is enabled; if so, frames need to be dropped until AEC is settled.
3503 for (auto & pendingRequest : mPendingRequestsList) {
3504 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3505 mInstantAECSettledFrameNumber)) {
3506 camera3_notify_msg_t notify_msg = {};
3507 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003508 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003509 QCamera3ProcessingChannel *channel =
3510 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003511 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003512 if (p_cam_frame_drop) {
3513 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003514 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003515 // Got the stream ID for drop frame.
3516 dropFrame = true;
3517 break;
3518 }
3519 }
3520 } else {
3521 // This is instant AEC case.
3522 // For instant AEC, drop the stream until AEC is settled.
3523 dropFrame = true;
3524 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003526 if (dropFrame) {
3527 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3528 if (p_cam_frame_drop) {
3529 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003531 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003532 } else {
3533 // For instant AEC, inform frame drop and frame number
3534 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3535 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003536 pendingRequest.frame_number, streamID,
3537 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003538 }
3539 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003540 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003541 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003543 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003544 if (p_cam_frame_drop) {
3545 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003546 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 } else {
3549 // For instant AEC, inform frame drop and frame number
3550 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3551 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003552 pendingRequest.frame_number, streamID,
3553 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 }
3555 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 PendingFrameDrop.stream_ID = streamID;
3558 // Add the Frame drop info to mPendingFrameDropList
3559 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 }
3562 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 for (auto & pendingRequest : mPendingRequestsList) {
3566 // Find the pending request with the frame number.
3567 if (pendingRequest.frame_number == frame_number) {
3568 // Update the sensor timestamp.
3569 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003570
Thierry Strudel3d639192016-09-09 11:52:26 -07003571
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003572 /* Set the timestamp in the display metadata so that clients aware of
3573 private_handle, such as VT, can use these unmodified timestamps.
3574 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003575 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003576
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 // Find channel requiring metadata, meaning internal offline postprocess
3578 // is needed.
3579 //TODO: for now, we don't support two streams requiring metadata at the same time.
3580 // (because we are not making copies, and metadata buffer is not reference counted.
3581 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3583 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 if (iter->need_metadata) {
3585 internalPproc = true;
3586 QCamera3ProcessingChannel *channel =
3587 (QCamera3ProcessingChannel *)iter->stream->priv;
3588 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003589 if(p_is_metabuf_queued != NULL) {
3590 *p_is_metabuf_queued = true;
3591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 break;
3593 }
3594 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003595 for (auto itr = pendingRequest.internalRequestList.begin();
3596 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003597 if (itr->need_metadata) {
3598 internalPproc = true;
3599 QCamera3ProcessingChannel *channel =
3600 (QCamera3ProcessingChannel *)itr->stream->priv;
3601 channel->queueReprocMetadata(metadata_buf);
3602 break;
3603 }
3604 }
3605
Thierry Strudel54dc9782017-02-15 12:12:10 -08003606 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 resultMetadata = translateFromHalMetadata(metadata,
3608 pendingRequest.timestamp, pendingRequest.request_id,
3609 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3610 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003611 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003612 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003613 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003614 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003615 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003616 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003618 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003620 if (pendingRequest.blob_request) {
3621 //Dump tuning metadata if enabled and available
3622 char prop[PROPERTY_VALUE_MAX];
3623 memset(prop, 0, sizeof(prop));
3624 property_get("persist.camera.dumpmetadata", prop, "0");
3625 int32_t enabled = atoi(prop);
3626 if (enabled && metadata->is_tuning_params_valid) {
3627 dumpMetadataToFile(metadata->tuning_params,
3628 mMetaFrameCount,
3629 enabled,
3630 "Snapshot",
3631 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 }
3633 }
3634
3635 if (!internalPproc) {
3636 LOGD("couldn't find need_metadata for this metadata");
3637 // Return metadata buffer
3638 if (free_and_bufdone_meta_buf) {
3639 mMetadataChannel->bufDone(metadata_buf);
3640 free(metadata_buf);
3641 }
3642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 }
3646 }
3647
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 // Try to send out shutter callbacks and capture results.
3649 handlePendingResultsWithLock(frame_number, resultMetadata);
3650 return;
3651
Thierry Strudel3d639192016-09-09 11:52:26 -07003652done_metadata:
3653 for (pendingRequestIterator i = mPendingRequestsList.begin();
3654 i != mPendingRequestsList.end() ;i++) {
3655 i->pipeline_depth++;
3656 }
3657 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3658 unblockRequestIfNecessary();
3659}
3660
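
/* Illustrative sketch (assumes the stock <hardware/camera3.h> callback types;
 * the helper is hypothetical): the function above reports each frame in two
 * stages, an urgent 3A-only partial result followed by the full metadata with
 * partial_result == PARTIAL_RESULT_COUNT. Sending one stage looks like this:
 *
 *   #include <cstring>
 *   #include <hardware/camera3.h>
 *
 *   static void sendPartialResult(const camera3_callback_ops_t *ops,
 *                                 uint32_t frameNumber,
 *                                 const camera_metadata_t *meta,
 *                                 int partialIndex) {
 *       camera3_capture_result_t result;
 *       memset(&result, 0, sizeof(result));
 *       result.frame_number = frameNumber;
 *       result.result = meta;              // 3A-only metadata for stage 1,
 *                                          // full metadata for the final stage
 *       result.num_output_buffers = 0;     // buffers may be reported separately
 *       result.output_buffers = nullptr;
 *       result.partial_result = partialIndex;
 *       ops->process_capture_result(ops, &result);
 *   }
 */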
3661/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003662 * FUNCTION : handleDepthDataLocked
3663 *
3664 * DESCRIPTION: Handles incoming depth data
3665 *
3666 * PARAMETERS : @depthData : Depth data
3667 * @frameNumber: Frame number of the incoming depth data
3668 *
3669 * RETURN :
3670 *
3671 *==========================================================================*/
3672void QCamera3HardwareInterface::handleDepthDataLocked(
3673 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3674 uint32_t currentFrameNumber;
3675 buffer_handle_t *depthBuffer;
3676
3677 if (nullptr == mDepthChannel) {
3678 LOGE("Depth channel not present!");
3679 return;
3680 }
3681
3682 camera3_stream_buffer_t resultBuffer =
3683 {.acquire_fence = -1,
3684 .release_fence = -1,
3685 .status = CAMERA3_BUFFER_STATUS_OK,
3686 .buffer = nullptr,
3687 .stream = mDepthChannel->getStream()};
3688 camera3_capture_result_t result =
3689 {.result = nullptr,
3690 .num_output_buffers = 1,
3691 .output_buffers = &resultBuffer,
3692 .partial_result = 0,
3693 .frame_number = 0};
3694
3695 do {
3696 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3697 if (nullptr == depthBuffer) {
3698 break;
3699 }
3700
3701 result.frame_number = currentFrameNumber;
3702 resultBuffer.buffer = depthBuffer;
3703 if (currentFrameNumber == frameNumber) {
3704 int32_t rc = mDepthChannel->populateDepthData(depthData,
3705 frameNumber);
3706 if (NO_ERROR != rc) {
3707 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3708 } else {
3709 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3710 }
3711 } else if (currentFrameNumber > frameNumber) {
3712 break;
3713 } else {
3714 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3715 {{currentFrameNumber, mDepthChannel->getStream(),
3716 CAMERA3_MSG_ERROR_BUFFER}}};
3717 orchestrateNotify(&notify_msg);
3718
3719 LOGE("Depth buffer for frame number: %d is missing, "
3720 "returning it back!", currentFrameNumber);
3721 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3722 }
3723 mDepthChannel->unmapBuffer(currentFrameNumber);
3724
3725 orchestrateResult(&result);
3726 } while (currentFrameNumber < frameNumber);
3727}
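
/* Illustrative sketch (generic pattern, hypothetical types): the loop above
 * drains the depth channel's oldest-first queue up to the frame the new depth
 * data belongs to, treating any older queued frame as an error because its
 * data never arrived.
 *
 *   #include <cstdint>
 *   #include <deque>
 *
 *   struct QueuedFrame { uint32_t frameNumber; bool filled; };
 *
 *   // Returns how many frames older than targetFrame had to be failed.
 *   static int drainUpTo(std::deque<QueuedFrame> &queue, uint32_t targetFrame) {
 *       int errors = 0;
 *       while (!queue.empty() && queue.front().frameNumber <= targetFrame) {
 *           QueuedFrame f = queue.front();
 *           queue.pop_front();
 *           if (f.frameNumber < targetFrame) {
 *               errors++;          // missing data for an older frame
 *           } else {
 *               f.filled = true;   // this frame gets the new depth data
 *           }
 *       }
 *       return errors;
 *   }
 */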
3728
3729/*===========================================================================
3730 * FUNCTION : notifyErrorFoPendingDepthData
3731 *
3732 * DESCRIPTION: Returns error for any pending depth buffers
3733 *
3734 * PARAMETERS : depthCh - depth channel that needs to get flushed
3735 *
3736 * RETURN :
3737 *
3738 *==========================================================================*/
3739void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3740 QCamera3DepthChannel *depthCh) {
3741 uint32_t currentFrameNumber;
3742 buffer_handle_t *depthBuffer;
3743
3744 if (nullptr == depthCh) {
3745 return;
3746 }
3747
3748 camera3_notify_msg_t notify_msg =
3749 {.type = CAMERA3_MSG_ERROR,
3750 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3751 camera3_stream_buffer_t resultBuffer =
3752 {.acquire_fence = -1,
3753 .release_fence = -1,
3754 .buffer = nullptr,
3755 .stream = depthCh->getStream(),
3756 .status = CAMERA3_BUFFER_STATUS_ERROR};
3757 camera3_capture_result_t result =
3758 {.result = nullptr,
3759 .frame_number = 0,
3760 .num_output_buffers = 1,
3761 .partial_result = 0,
3762 .output_buffers = &resultBuffer};
3763
3764 while (nullptr !=
3765 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3766 depthCh->unmapBuffer(currentFrameNumber);
3767
3768 notify_msg.message.error.frame_number = currentFrameNumber;
3769 orchestrateNotify(&notify_msg);
3770
3771 resultBuffer.buffer = depthBuffer;
3772 result.frame_number = currentFrameNumber;
3773 orchestrateResult(&result);
3774 };
3775}
3776
3777/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003778 * FUNCTION : hdrPlusPerfLock
3779 *
3780 * DESCRIPTION: perf lock for HDR+ using custom intent
3781 *
3782 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3783 *
3784 * RETURN : None
3785 *
3786 *==========================================================================*/
3787void QCamera3HardwareInterface::hdrPlusPerfLock(
3788 mm_camera_super_buf_t *metadata_buf)
3789{
3790 if (NULL == metadata_buf) {
3791 LOGE("metadata_buf is NULL");
3792 return;
3793 }
3794 metadata_buffer_t *metadata =
3795 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3796 int32_t *p_frame_number_valid =
3797 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3798 uint32_t *p_frame_number =
3799 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3800
3801 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3802 LOGE("%s: Invalid metadata", __func__);
3803 return;
3804 }
3805
3806 //acquire perf lock for 5 sec after the last HDR frame is captured
3807 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3808 if ((p_frame_number != NULL) &&
3809 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003810 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003811 }
3812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813}
3814
3815/*===========================================================================
3816 * FUNCTION : handleInputBufferWithLock
3817 *
3818 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3819 *
3820 * PARAMETERS : @frame_number: frame number of the input buffer
3821 *
3822 * RETURN :
3823 *
3824 *==========================================================================*/
3825void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3826{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003827 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003828 pendingRequestIterator i = mPendingRequestsList.begin();
3829 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3830 i++;
3831 }
3832 if (i != mPendingRequestsList.end() && i->input_buffer) {
3833 //found the right request
3834 if (!i->shutter_notified) {
3835 CameraMetadata settings;
3836 camera3_notify_msg_t notify_msg;
3837 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3838 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3839 if(i->settings) {
3840 settings = i->settings;
3841 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3842 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3843 } else {
3844 LOGE("No timestamp in input settings! Using current one.");
3845 }
3846 } else {
3847 LOGE("Input settings missing!");
3848 }
3849
3850 notify_msg.type = CAMERA3_MSG_SHUTTER;
3851 notify_msg.message.shutter.frame_number = frame_number;
3852 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003853 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003854 i->shutter_notified = true;
3855 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3856 i->frame_number, notify_msg.message.shutter.timestamp);
3857 }
3858
3859 if (i->input_buffer->release_fence != -1) {
3860 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3861 close(i->input_buffer->release_fence);
3862 if (rc != OK) {
3863 LOGE("input buffer sync wait failed %d", rc);
3864 }
3865 }
3866
3867 camera3_capture_result result;
3868 memset(&result, 0, sizeof(camera3_capture_result));
3869 result.frame_number = frame_number;
3870 result.result = i->settings;
3871 result.input_buffer = i->input_buffer;
3872 result.partial_result = PARTIAL_RESULT_COUNT;
3873
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003874 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875 LOGD("Input request metadata and input buffer frame_number = %u",
3876 i->frame_number);
3877 i = erasePendingRequest(i);
3878 } else {
3879 LOGE("Could not find input request for frame number %d", frame_number);
3880 }
3881}
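
/* Illustrative sketch (stock <hardware/camera3.h> types; helper name is
 * hypothetical): the shutter path above boils down to filling a
 * CAMERA3_MSG_SHUTTER notification with the frame number and a nanosecond
 * timestamp and handing it to the framework callback:
 *
 *   #include <cstring>
 *   #include <hardware/camera3.h>
 *
 *   static void notifyShutter(const camera3_callback_ops_t *ops,
 *                             uint32_t frameNumber, uint64_t timestampNs) {
 *       camera3_notify_msg_t msg;
 *       memset(&msg, 0, sizeof(msg));
 *       msg.type = CAMERA3_MSG_SHUTTER;
 *       msg.message.shutter.frame_number = frameNumber;
 *       msg.message.shutter.timestamp = timestampNs;
 *       ops->notify(ops, &msg);
 *   }
 */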
3882
3883/*===========================================================================
3884 * FUNCTION : handleBufferWithLock
3885 *
3886 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3887 *
3888 * PARAMETERS : @buffer: image buffer for the callback
3889 * @frame_number: frame number of the image buffer
3890 *
3891 * RETURN :
3892 *
3893 *==========================================================================*/
3894void QCamera3HardwareInterface::handleBufferWithLock(
3895 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3896{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003897 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003898
3899 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3900 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3901 }
3902
Thierry Strudel3d639192016-09-09 11:52:26 -07003903 /* Nothing to be done during error state */
3904 if ((ERROR == mState) || (DEINIT == mState)) {
3905 return;
3906 }
3907 if (mFlushPerf) {
3908 handleBuffersDuringFlushLock(buffer);
3909 return;
3910 }
3911 //not in flush
3912 // If the frame number doesn't exist in the pending request list,
3913 // directly send the buffer to the frameworks, and update pending buffers map
3914 // Otherwise, book-keep the buffer.
3915 pendingRequestIterator i = mPendingRequestsList.begin();
3916 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3917 i++;
3918 }
3919 if (i == mPendingRequestsList.end()) {
3920 // Verify all pending requests frame_numbers are greater
3921 for (pendingRequestIterator j = mPendingRequestsList.begin();
3922 j != mPendingRequestsList.end(); j++) {
3923 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3924 LOGW("Error: pending live frame number %d is smaller than %d",
3925 j->frame_number, frame_number);
3926 }
3927 }
3928 camera3_capture_result_t result;
3929 memset(&result, 0, sizeof(camera3_capture_result_t));
3930 result.result = NULL;
3931 result.frame_number = frame_number;
3932 result.num_output_buffers = 1;
3933 result.partial_result = 0;
3934 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3935 m != mPendingFrameDropList.end(); m++) {
3936 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3937 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3938 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3939 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3940 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3941 frame_number, streamID);
3942 m = mPendingFrameDropList.erase(m);
3943 break;
3944 }
3945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003946 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003947 result.output_buffers = buffer;
3948 LOGH("result frame_number = %d, buffer = %p",
3949 frame_number, buffer->buffer);
3950
3951 mPendingBuffersMap.removeBuf(buffer->buffer);
3952
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003953 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003954 } else {
3955 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003956 if (i->input_buffer->release_fence != -1) {
3957 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3958 close(i->input_buffer->release_fence);
3959 if (rc != OK) {
3960 LOGE("input buffer sync wait failed %d", rc);
3961 }
3962 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003963 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003964
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003965 // Put buffer into the pending request
3966 for (auto &requestedBuffer : i->buffers) {
3967 if (requestedBuffer.stream == buffer->stream) {
3968 if (requestedBuffer.buffer != nullptr) {
3969 LOGE("Error: buffer is already set");
3970 } else {
3971 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3972 sizeof(camera3_stream_buffer_t));
3973 *(requestedBuffer.buffer) = *buffer;
3974 LOGH("cache buffer %p at result frame_number %u",
3975 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003976 }
3977 }
3978 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003979
3980 if (i->input_buffer) {
3981 // For a reprocessing request, try to send out shutter callback and result metadata.
3982 handlePendingResultsWithLock(frame_number, nullptr);
3983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003985
3986 if (mPreviewStarted == false) {
3987 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3988 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3989 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3990 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3991 mPreviewStarted = true;
3992
3993 // Set power hint for preview
3994 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3995 }
3996 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003997}
3998
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003999void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4000 const camera_metadata_t *resultMetadata)
4001{
4002 // Find the pending request for this result metadata.
4003 auto requestIter = mPendingRequestsList.begin();
4004 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4005 requestIter++;
4006 }
4007
4008 if (requestIter == mPendingRequestsList.end()) {
4009 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4010 return;
4011 }
4012
4013 // Update the result metadata
4014 requestIter->resultMetadata = resultMetadata;
4015
4016 // Check what type of request this is.
4017 bool liveRequest = false;
4018 if (requestIter->hdrplus) {
4019 // HDR+ request doesn't have partial results.
4020 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4021 } else if (requestIter->input_buffer != nullptr) {
4022 // Reprocessing request result is the same as settings.
4023 requestIter->resultMetadata = requestIter->settings;
4024 // Reprocessing request doesn't have partial results.
4025 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4026 } else {
4027 liveRequest = true;
4028 requestIter->partial_result_cnt++;
4029 mPendingLiveRequest--;
4030
4031 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004032 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4033 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004034 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4035 }
4036 }
4037
4038 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4039 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4040 bool readyToSend = true;
4041
4042 // Iterate through the pending requests to send out shutter callbacks and results that are
4043 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4044 // live requests that don't have result metadata yet.
4045 auto iter = mPendingRequestsList.begin();
4046 while (iter != mPendingRequestsList.end()) {
4047 // Check if current pending request is ready. If it's not ready, the following pending
4048 // requests are also not ready.
4049 if (readyToSend && iter->resultMetadata == nullptr) {
4050 readyToSend = false;
4051 }
4052
4053 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4054
4055 std::vector<camera3_stream_buffer_t> outputBuffers;
4056
4057 camera3_capture_result_t result = {};
4058 result.frame_number = iter->frame_number;
4059 result.result = iter->resultMetadata;
4060 result.partial_result = iter->partial_result_cnt;
4061
4062 // If this pending buffer has result metadata, we may be able to send out shutter callback
4063 // and result metadata.
4064 if (iter->resultMetadata != nullptr) {
4065 if (!readyToSend) {
4066                 // If any of the previous pending requests is not ready, this pending request is
4067 // also not ready to send in order to keep shutter callbacks and result metadata
4068 // in order.
4069 iter++;
4070 continue;
4071 }
4072
4073 // Invoke shutter callback if not yet.
4074 if (!iter->shutter_notified) {
4075 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4076
4077 // Find the timestamp in HDR+ result metadata
4078 camera_metadata_ro_entry_t entry;
4079 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4080 ANDROID_SENSOR_TIMESTAMP, &entry);
4081 if (res != OK) {
4082 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4083 __FUNCTION__, iter->frame_number, strerror(-res), res);
4084 } else {
4085 timestamp = entry.data.i64[0];
4086 }
4087
4088 camera3_notify_msg_t notify_msg = {};
4089 notify_msg.type = CAMERA3_MSG_SHUTTER;
4090 notify_msg.message.shutter.frame_number = iter->frame_number;
4091 notify_msg.message.shutter.timestamp = timestamp;
4092 orchestrateNotify(&notify_msg);
4093 iter->shutter_notified = true;
4094 }
4095
4096 result.input_buffer = iter->input_buffer;
4097
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004098 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4099 // If the result metadata belongs to a live request, notify errors for previous pending
4100 // live requests.
4101 mPendingLiveRequest--;
4102
4103 CameraMetadata dummyMetadata;
4104 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4105 result.result = dummyMetadata.release();
4106
4107 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004108
4109             // partial_result should be PARTIAL_RESULT_COUNT in case of
4110 // ERROR_RESULT.
4111 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4112 result.partial_result = PARTIAL_RESULT_COUNT;
4113
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004114 } else {
4115 iter++;
4116 continue;
4117 }
4118
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004119 // Prepare output buffer array
4120 for (auto bufferInfoIter = iter->buffers.begin();
4121 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4122 if (bufferInfoIter->buffer != nullptr) {
4123
4124 QCamera3Channel *channel =
4125 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4126 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4127
4128 // Check if this buffer is a dropped frame.
4129 auto frameDropIter = mPendingFrameDropList.begin();
4130 while (frameDropIter != mPendingFrameDropList.end()) {
4131 if((frameDropIter->stream_ID == streamID) &&
4132 (frameDropIter->frame_number == frameNumber)) {
4133 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4134 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4135 streamID);
4136 mPendingFrameDropList.erase(frameDropIter);
4137 break;
4138 } else {
4139 frameDropIter++;
4140 }
4141 }
4142
4143 // Check buffer error status
4144 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4145 bufferInfoIter->buffer->buffer);
4146 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4147
4148 outputBuffers.push_back(*(bufferInfoIter->buffer));
4149 free(bufferInfoIter->buffer);
4150 bufferInfoIter->buffer = NULL;
4151 }
4152 }
4153
4154 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4155 result.num_output_buffers = outputBuffers.size();
4156
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 orchestrateResult(&result);
4158
4159 // For reprocessing, result metadata is the same as settings so do not free it here to
4160 // avoid double free.
4161 if (result.result != iter->settings) {
4162 free_camera_metadata((camera_metadata_t *)result.result);
4163 }
4164 iter->resultMetadata = nullptr;
4165 iter = erasePendingRequest(iter);
4166 }
4167
4168 if (liveRequest) {
4169 for (auto &iter : mPendingRequestsList) {
4170 // Increment pipeline depth for the following pending requests.
4171 if (iter.frame_number > frameNumber) {
4172 iter.pipeline_depth++;
4173 }
4174 }
4175 }
4176
4177 unblockRequestIfNecessary();
4178}
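// Delivery-order note: shutter callbacks and result metadata are sent to the
// framework strictly in frame-number order; a pending request whose metadata
// has not arrived yet blocks delivery of all later, already-completed
// requests. When metadata for a later live request arrives, earlier live
// requests that still have no metadata are completed with a
// CAMERA3_MSG_ERROR_RESULT notification and a dummy metadata buffer carrying
// only the request id, as implemented above.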
4179
Thierry Strudel3d639192016-09-09 11:52:26 -07004180/*===========================================================================
4181 * FUNCTION : unblockRequestIfNecessary
4182 *
4183 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4184 * that mMutex is held when this function is called.
4185 *
4186 * PARAMETERS :
4187 *
4188 * RETURN :
4189 *
4190 *==========================================================================*/
4191void QCamera3HardwareInterface::unblockRequestIfNecessary()
4192{
4193 // Unblock process_capture_request
4194 pthread_cond_signal(&mRequestCond);
4195}
4196
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004197/*===========================================================================
4198 * FUNCTION : isHdrSnapshotRequest
4199 *
4200 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4201 *
4202 * PARAMETERS : camera3 request structure
4203 *
4204 * RETURN : boolean decision variable
4205 *
4206 *==========================================================================*/
4207bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4208{
4209 if (request == NULL) {
4210 LOGE("Invalid request handle");
4211 assert(0);
4212 return false;
4213 }
4214
4215 if (!mForceHdrSnapshot) {
4216 CameraMetadata frame_settings;
4217 frame_settings = request->settings;
4218
4219 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4220 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4221 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4222 return false;
4223 }
4224 } else {
4225 return false;
4226 }
4227
4228 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4229 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4230 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4231 return false;
4232 }
4233 } else {
4234 return false;
4235 }
4236 }
4237
4238 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4239 if (request->output_buffers[i].stream->format
4240 == HAL_PIXEL_FORMAT_BLOB) {
4241 return true;
4242 }
4243 }
4244
4245 return false;
4246}
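// Decision summary for isHdrSnapshotRequest(): unless mForceHdrSnapshot is
// set, the request settings must carry ANDROID_CONTROL_MODE_USE_SCENE_MODE
// together with ANDROID_CONTROL_SCENE_MODE_HDR; in every case at least one
// output buffer must target a HAL_PIXEL_FORMAT_BLOB stream for the request
// to be treated as an HDR snapshot.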
4247/*===========================================================================
4248 * FUNCTION : orchestrateRequest
4249 *
4250 * DESCRIPTION: Orchestrates a capture request from camera service
4251 *
4252 * PARAMETERS :
4253 * @request : request from framework to process
4254 *
4255 * RETURN : Error status codes
4256 *
4257 *==========================================================================*/
4258int32_t QCamera3HardwareInterface::orchestrateRequest(
4259 camera3_capture_request_t *request)
4260{
4261
4262 uint32_t originalFrameNumber = request->frame_number;
4263 uint32_t originalOutputCount = request->num_output_buffers;
4264 const camera_metadata_t *original_settings = request->settings;
4265 List<InternalRequest> internallyRequestedStreams;
4266 List<InternalRequest> emptyInternalList;
4267
4268 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4269 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4270 uint32_t internalFrameNumber;
4271 CameraMetadata modified_meta;
4272
4273
4274 /* Add Blob channel to list of internally requested streams */
4275 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4276 if (request->output_buffers[i].stream->format
4277 == HAL_PIXEL_FORMAT_BLOB) {
4278 InternalRequest streamRequested;
4279 streamRequested.meteringOnly = 1;
4280 streamRequested.need_metadata = 0;
4281 streamRequested.stream = request->output_buffers[i].stream;
4282 internallyRequestedStreams.push_back(streamRequested);
4283 }
4284 }
4285 request->num_output_buffers = 0;
4286 auto itr = internallyRequestedStreams.begin();
4287
4288 /* Modify setting to set compensation */
4289 modified_meta = request->settings;
4290 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4291 uint8_t aeLock = 1;
4292 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4293 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4294 camera_metadata_t *modified_settings = modified_meta.release();
4295 request->settings = modified_settings;
4296
4297 /* Capture Settling & -2x frame */
4298 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4299 request->frame_number = internalFrameNumber;
4300 processCaptureRequest(request, internallyRequestedStreams);
4301
4302 request->num_output_buffers = originalOutputCount;
4303 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4304 request->frame_number = internalFrameNumber;
4305 processCaptureRequest(request, emptyInternalList);
4306 request->num_output_buffers = 0;
4307
4308 modified_meta = modified_settings;
4309 expCompensation = 0;
4310 aeLock = 1;
4311 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4312 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4313 modified_settings = modified_meta.release();
4314 request->settings = modified_settings;
4315
4316 /* Capture Settling & 0X frame */
4317
4318 itr = internallyRequestedStreams.begin();
4319 if (itr == internallyRequestedStreams.end()) {
4320 LOGE("Error Internally Requested Stream list is empty");
4321 assert(0);
4322 } else {
4323 itr->need_metadata = 0;
4324 itr->meteringOnly = 1;
4325 }
4326
4327 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4328 request->frame_number = internalFrameNumber;
4329 processCaptureRequest(request, internallyRequestedStreams);
4330
4331 itr = internallyRequestedStreams.begin();
4332 if (itr == internallyRequestedStreams.end()) {
4333 ALOGE("Error Internally Requested Stream list is empty");
4334 assert(0);
4335 } else {
4336 itr->need_metadata = 1;
4337 itr->meteringOnly = 0;
4338 }
4339
4340 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4341 request->frame_number = internalFrameNumber;
4342 processCaptureRequest(request, internallyRequestedStreams);
4343
4344 /* Capture 2X frame*/
4345 modified_meta = modified_settings;
4346 expCompensation = GB_HDR_2X_STEP_EV;
4347 aeLock = 1;
4348 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4349 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4350 modified_settings = modified_meta.release();
4351 request->settings = modified_settings;
4352
4353 itr = internallyRequestedStreams.begin();
4354 if (itr == internallyRequestedStreams.end()) {
4355 ALOGE("Error Internally Requested Stream list is empty");
4356 assert(0);
4357 } else {
4358 itr->need_metadata = 0;
4359 itr->meteringOnly = 1;
4360 }
4361 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4362 request->frame_number = internalFrameNumber;
4363 processCaptureRequest(request, internallyRequestedStreams);
4364
4365 itr = internallyRequestedStreams.begin();
4366 if (itr == internallyRequestedStreams.end()) {
4367 ALOGE("Error Internally Requested Stream list is empty");
4368 assert(0);
4369 } else {
4370 itr->need_metadata = 1;
4371 itr->meteringOnly = 0;
4372 }
4373
4374 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4375 request->frame_number = internalFrameNumber;
4376 processCaptureRequest(request, internallyRequestedStreams);
4377
4378
4379 /* Capture 2X on original streaming config*/
4380 internallyRequestedStreams.clear();
4381
4382 /* Restore original settings pointer */
4383 request->settings = original_settings;
4384 } else {
4385 uint32_t internalFrameNumber;
4386 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 return processCaptureRequest(request, internallyRequestedStreams);
4389 }
4390
4391 return NO_ERROR;
4392}
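// Request sequence issued by the HDR snapshot path above (all frame numbers
// except the framework-visible one are internal and map to
// EMPTY_FRAMEWORK_FRAME_NUMBER, so orchestrateResult()/orchestrateNotify()
// drop their results and notifications):
//   1. Metering-only internal request on the BLOB stream with AE locked and
//      exposure compensation GB_HDR_HALF_STEP_EV (settling frame).
//   2. The framework-visible request with the original output buffers,
//      mapped to the original frame number via _orchestrationDb.
//   3. Settling (metering-only) and capture (need_metadata) internal
//      requests at 0 exposure compensation.
//   4. Settling and capture internal requests at GB_HDR_2X_STEP_EV.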
4393
4394/*===========================================================================
4395 * FUNCTION : orchestrateResult
4396 *
4397 * DESCRIPTION: Orchestrates a capture result to camera service
4398 *
4399 * PARAMETERS :
4400 * @request : request from framework to process
4401 *
4402 * RETURN :
4403 *
4404 *==========================================================================*/
4405void QCamera3HardwareInterface::orchestrateResult(
4406 camera3_capture_result_t *result)
4407{
4408 uint32_t frameworkFrameNumber;
4409 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4410 frameworkFrameNumber);
4411 if (rc != NO_ERROR) {
4412 LOGE("Cannot find translated frameworkFrameNumber");
4413 assert(0);
4414 } else {
4415 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004416 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004417 } else {
4418 result->frame_number = frameworkFrameNumber;
4419 mCallbackOps->process_capture_result(mCallbackOps, result);
4420 }
4421 }
4422}
4423
4424/*===========================================================================
4425 * FUNCTION : orchestrateNotify
4426 *
4427 * DESCRIPTION: Orchestrates a notify to camera service
4428 *
4429 * PARAMETERS :
4430 *   @notify_msg : notify message to be sent to camera service
4431 *
4432 * RETURN :
4433 *
4434 *==========================================================================*/
4435void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4436{
4437 uint32_t frameworkFrameNumber;
4438 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004439 int32_t rc = NO_ERROR;
4440
4441 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004442 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004443
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004445 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4446 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4447 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004448 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004449 LOGE("Cannot find translated frameworkFrameNumber");
4450 assert(0);
4451 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004452 }
4453 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004454
4455 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4456 LOGD("Internal Request drop the notifyCb");
4457 } else {
4458 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4459 mCallbackOps->notify(mCallbackOps, notify_msg);
4460 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004461}
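// Note: if no framework mapping exists for the internal frame number, only
// CAMERA3_MSG_ERROR_DEVICE is still forwarded (with frame number 0); any
// other untranslated notification is logged as an error and dropped here.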
4462
4463/*===========================================================================
4464 * FUNCTION : FrameNumberRegistry
4465 *
4466 * DESCRIPTION: Constructor
4467 *
4468 * PARAMETERS :
4469 *
4470 * RETURN :
4471 *
4472 *==========================================================================*/
4473FrameNumberRegistry::FrameNumberRegistry()
4474{
4475 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4476}
4477
4478/*===========================================================================
4479 * FUNCTION : ~FrameNumberRegistry
4480 *
4481 * DESCRIPTION: Destructor
4482 *
4483 * PARAMETERS :
4484 *
4485 * RETURN :
4486 *
4487 *==========================================================================*/
4488FrameNumberRegistry::~FrameNumberRegistry()
4489{
4490}
4491
4492/*===========================================================================
4493 * FUNCTION : PurgeOldEntriesLocked
4494 *
4495 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4496 *
4497 * PARAMETERS :
4498 *
4499 * RETURN : NONE
4500 *
4501 *==========================================================================*/
4502void FrameNumberRegistry::purgeOldEntriesLocked()
4503{
4504 while (_register.begin() != _register.end()) {
4505 auto itr = _register.begin();
4506 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4507 _register.erase(itr);
4508 } else {
4509 return;
4510 }
4511 }
4512}
4513
4514/*===========================================================================
4515 * FUNCTION : allocStoreInternalFrameNumber
4516 *
4517 * DESCRIPTION: Method to note down a framework request and associate a new
4518 * internal request number against it
4519 *
4520 * PARAMETERS :
4521 * @fFrameNumber: Identifier given by framework
4522 * @internalFN : Output parameter which will have the newly generated internal
4523 * entry
4524 *
4525 * RETURN : Error code
4526 *
4527 *==========================================================================*/
4528int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4529 uint32_t &internalFrameNumber)
4530{
4531 Mutex::Autolock lock(mRegistryLock);
4532 internalFrameNumber = _nextFreeInternalNumber++;
4533 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4534 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4535 purgeOldEntriesLocked();
4536 return NO_ERROR;
4537}
4538
4539/*===========================================================================
4540 * FUNCTION : generateStoreInternalFrameNumber
4541 *
4542 * DESCRIPTION: Method to associate a new internal request number independent
4543 *              of any association with a framework request
4544 *
4545 * PARAMETERS :
4546 *   @internalFrame#: Output parameter which will hold the newly generated
4547 *                    internal frame number
4548 *
4549 * RETURN : Error code
4550 *
4551 *==========================================================================*/
4552int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4553{
4554 Mutex::Autolock lock(mRegistryLock);
4555 internalFrameNumber = _nextFreeInternalNumber++;
4556 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4557 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4558 purgeOldEntriesLocked();
4559 return NO_ERROR;
4560}
4561
4562/*===========================================================================
4563 * FUNCTION : getFrameworkFrameNumber
4564 *
4565 * DESCRIPTION: Method to query the framework framenumber given an internal #
4566 *
4567 * PARAMETERS :
4568 * @internalFrame#: Internal reference
4569 * @frameworkframenumber: Output parameter holding framework frame entry
4570 *
4571 * RETURN : Error code
4572 *
4573 *==========================================================================*/
4574int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4575 uint32_t &frameworkFrameNumber)
4576{
4577 Mutex::Autolock lock(mRegistryLock);
4578 auto itr = _register.find(internalFrameNumber);
4579 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004580 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004581 return -ENOENT;
4582 }
4583
4584 frameworkFrameNumber = itr->second;
4585 purgeOldEntriesLocked();
4586 return NO_ERROR;
4587}
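// Illustrative usage sketch of FrameNumberRegistry (frame number 12 is
// hypothetical; the real callers are orchestrateRequest()/orchestrateResult()
// above):
//
//   uint32_t internal;
//   _orchestrationDb.allocStoreInternalFrameNumber(12 /*framework*/, internal);
//       // maps internal -> 12, so results/notifies carrying 'internal' are
//       // translated back to framework frame 12.
//   _orchestrationDb.generateStoreInternalFrameNumber(internal);
//       // maps internal -> EMPTY_FRAMEWORK_FRAME_NUMBER, so results and
//       // notifications for this purely internal request are dropped.
//
// Entries older than FRAME_REGISTER_LRU_SIZE internal numbers are purged on
// every insert and lookup via purgeOldEntriesLocked().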
Thierry Strudel3d639192016-09-09 11:52:26 -07004588
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004589status_t QCamera3HardwareInterface::fillPbStreamConfig(
4590 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4591 QCamera3Channel *channel, uint32_t streamIndex) {
4592 if (config == nullptr) {
4593 LOGE("%s: config is null", __FUNCTION__);
4594 return BAD_VALUE;
4595 }
4596
4597 if (channel == nullptr) {
4598 LOGE("%s: channel is null", __FUNCTION__);
4599 return BAD_VALUE;
4600 }
4601
4602 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4603 if (stream == nullptr) {
4604 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4605 return NAME_NOT_FOUND;
4606 }
4607
4608 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4609 if (streamInfo == nullptr) {
4610 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4611 return NAME_NOT_FOUND;
4612 }
4613
4614 config->id = pbStreamId;
4615 config->image.width = streamInfo->dim.width;
4616 config->image.height = streamInfo->dim.height;
4617 config->image.padding = 0;
4618 config->image.format = pbStreamFormat;
4619
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004620 uint32_t totalPlaneSize = 0;
4621
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004622 // Fill plane information.
4623 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4624 pbcamera::PlaneConfiguration plane;
4625 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4626 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4627 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004628
4629 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004630 }
4631
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004633 return OK;
4634}
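// The padding reported to the HDR+ client above is the slack between the
// frame length allocated by the backend and the sum of the per-plane sizes:
//   padding = frame_len - sum(stride_i * scanline_i)
// with stride/scanline taken from the stream's cam_stream_info_t plane info.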
4635
Thierry Strudel3d639192016-09-09 11:52:26 -07004636/*===========================================================================
4637 * FUNCTION : processCaptureRequest
4638 *
4639 * DESCRIPTION: process a capture request from camera service
4640 *
4641 * PARAMETERS :
4642 * @request : request from framework to process
4643 *
4644 * RETURN :
4645 *
4646 *==========================================================================*/
4647int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004648 camera3_capture_request_t *request,
4649 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004650{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004651 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004652 int rc = NO_ERROR;
4653 int32_t request_id;
4654 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004655 bool isVidBufRequested = false;
4656 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004657 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004658
4659 pthread_mutex_lock(&mMutex);
4660
4661 // Validate current state
4662 switch (mState) {
4663 case CONFIGURED:
4664 case STARTED:
4665 /* valid state */
4666 break;
4667
4668 case ERROR:
4669 pthread_mutex_unlock(&mMutex);
4670 handleCameraDeviceError();
4671 return -ENODEV;
4672
4673 default:
4674 LOGE("Invalid state %d", mState);
4675 pthread_mutex_unlock(&mMutex);
4676 return -ENODEV;
4677 }
4678
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004679 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004680 if (rc != NO_ERROR) {
4681 LOGE("incoming request is not valid");
4682 pthread_mutex_unlock(&mMutex);
4683 return rc;
4684 }
4685
4686 meta = request->settings;
4687
4688 // For first capture request, send capture intent, and
4689 // stream on all streams
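    // The CONFIGURED-state block below (re)sends stream info to the backend
    // (preceded by an unconfigure when this is not the first configuration),
    // selects the IS/EIS type per stream, programs tintless, CDS, AV timer
    // and fps/HFR session parameters, initializes all channels (raw dump,
    // HDR+ RAW source, support, analysis and dummy batch included), sets
    // bundle and dual-camera link info, and finally starts every channel.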
4690 if (mState == CONFIGURED) {
4691 // send an unconfigure to the backend so that the isp
4692 // resources are deallocated
4693 if (!mFirstConfiguration) {
4694 cam_stream_size_info_t stream_config_info;
4695 int32_t hal_version = CAM_HAL_V3;
4696 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4697 stream_config_info.buffer_info.min_buffers =
4698 MIN_INFLIGHT_REQUESTS;
4699 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004700 m_bIs4KVideo ? 0 :
4701 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004702 clear_metadata_buffer(mParameters);
4703 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4704 CAM_INTF_PARM_HAL_VERSION, hal_version);
4705 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4706 CAM_INTF_META_STREAM_INFO, stream_config_info);
4707 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4708 mParameters);
4709 if (rc < 0) {
4710 LOGE("set_parms for unconfigure failed");
4711 pthread_mutex_unlock(&mMutex);
4712 return rc;
4713 }
4714 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004715 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004717 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004718 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004719 property_get("persist.camera.is_type", is_type_value, "4");
4720 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4721 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4722 property_get("persist.camera.is_type_preview", is_type_value, "4");
4723 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4724 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004725
4726 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4727 int32_t hal_version = CAM_HAL_V3;
4728 uint8_t captureIntent =
4729 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4730 mCaptureIntent = captureIntent;
4731 clear_metadata_buffer(mParameters);
4732 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4733 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4734 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004735 if (mFirstConfiguration) {
4736 // configure instant AEC
4737 // Instant AEC is a session based parameter and it is needed only
4738 // once per complete session after open camera.
4739 // i.e. This is set only once for the first capture request, after open camera.
4740 setInstantAEC(meta);
4741 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004742 uint8_t fwkVideoStabMode=0;
4743 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4744 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4745 }
4746
4747 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4748 // turn it on for video/preview
4749 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4750 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004751 int32_t vsMode;
4752 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4754 rc = BAD_VALUE;
4755 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004756 LOGD("setEis %d", setEis);
4757 bool eis3Supported = false;
4758 size_t count = IS_TYPE_MAX;
4759 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4760 for (size_t i = 0; i < count; i++) {
4761 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4762 eis3Supported = true;
4763 break;
4764 }
4765 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004766
4767        //IS type will be 0 (IS_TYPE_NONE) unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004768        //it could either be 4 (IS_TYPE_EIS_2_0) or 5 (IS_TYPE_EIS_3_0) depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4770 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004771 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4772 is_type = isTypePreview;
4773 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4774 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4775 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004777 } else {
4778 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004779 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004780 } else {
4781 is_type = IS_TYPE_NONE;
4782 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004784 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4786 }
4787 }
4788
4789 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4790 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4791
Thierry Strudel54dc9782017-02-15 12:12:10 -08004792 //Disable tintless only if the property is set to 0
4793 memset(prop, 0, sizeof(prop));
4794 property_get("persist.camera.tintless.enable", prop, "1");
4795 int32_t tintless_value = atoi(prop);
4796
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4798 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004799
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 //Disable CDS for HFR mode or if DIS/EIS is on.
4801 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4802 //after every configure_stream
4803 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4804 (m_bIsVideo)) {
4805 int32_t cds = CAM_CDS_MODE_OFF;
4806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4807 CAM_INTF_PARM_CDS_MODE, cds))
4808 LOGE("Failed to disable CDS for HFR mode");
4809
4810 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004811
4812 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4813 uint8_t* use_av_timer = NULL;
4814
4815 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004816 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 use_av_timer = &m_debug_avtimer;
4818 }
4819 else{
4820 use_av_timer =
4821 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004822 if (use_av_timer) {
4823 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4824 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004825 }
4826
4827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4828 rc = BAD_VALUE;
4829 }
4830 }
4831
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 setMobicat();
4833
4834 /* Set fps and hfr mode while sending meta stream info so that sensor
4835 * can configure appropriate streaming mode */
4836 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4838 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4840 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 if (rc == NO_ERROR) {
4842 int32_t max_fps =
4843 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004844 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4846 }
4847 /* For HFR, more buffers are dequeued upfront to improve the performance */
4848 if (mBatchSize) {
4849 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4850 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4851 }
4852 }
4853 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 LOGE("setHalFpsRange failed");
4855 }
4856 }
4857 if (meta.exists(ANDROID_CONTROL_MODE)) {
4858 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4859 rc = extractSceneMode(meta, metaMode, mParameters);
4860 if (rc != NO_ERROR) {
4861 LOGE("extractSceneMode failed");
4862 }
4863 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004864 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004865
Thierry Strudel04e026f2016-10-10 11:27:36 -07004866 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4867 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4868 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4869 rc = setVideoHdrMode(mParameters, vhdr);
4870 if (rc != NO_ERROR) {
4871 LOGE("setVideoHDR is failed");
4872 }
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 //TODO: validate the arguments, HSV scenemode should have only the
4876 //advertised fps ranges
4877
4878 /*set the capture intent, hal version, tintless, stream info,
4879 *and disenable parameters to the backend*/
4880 LOGD("set_parms META_STREAM_INFO " );
4881 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004882 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4883 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 mStreamConfigInfo.type[i],
4885 mStreamConfigInfo.stream_sizes[i].width,
4886 mStreamConfigInfo.stream_sizes[i].height,
4887 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 mStreamConfigInfo.format[i],
4889 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004890 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4893 mParameters);
4894 if (rc < 0) {
4895 LOGE("set_parms failed for hal version, stream info");
4896 }
4897
Chien-Yu Chenee335912017-02-09 17:53:20 -08004898 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4899 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 if (rc != NO_ERROR) {
4901 LOGE("Failed to get sensor output size");
4902 pthread_mutex_unlock(&mMutex);
4903 goto error_exit;
4904 }
4905
4906 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4907 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004908 mSensorModeInfo.active_array_size.width,
4909 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004911 if (gHdrPlusClient != nullptr) {
4912 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4913 if (rc != OK) {
4914 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4915 mCameraId, mSensorModeInfo.op_pixel_clk);
4916 pthread_mutex_unlock(&mMutex);
4917 goto error_exit;
4918 }
4919 }
4920
Thierry Strudel3d639192016-09-09 11:52:26 -07004921 /* Set batchmode before initializing channel. Since registerBuffer
4922 * internally initializes some of the channels, better set batchmode
4923 * even before first register buffer */
4924 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4925 it != mStreamInfo.end(); it++) {
4926 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4927 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4928 && mBatchSize) {
4929 rc = channel->setBatchSize(mBatchSize);
4930 //Disable per frame map unmap for HFR/batchmode case
4931 rc |= channel->setPerFrameMapUnmap(false);
4932 if (NO_ERROR != rc) {
4933 LOGE("Channel init failed %d", rc);
4934 pthread_mutex_unlock(&mMutex);
4935 goto error_exit;
4936 }
4937 }
4938 }
4939
4940 //First initialize all streams
4941 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4942 it != mStreamInfo.end(); it++) {
4943 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4944 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4945 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 setEis) {
4947 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4948 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4949 is_type = mStreamConfigInfo.is_type[i];
4950 break;
4951 }
4952 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004954 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 rc = channel->initialize(IS_TYPE_NONE);
4956 }
4957 if (NO_ERROR != rc) {
4958 LOGE("Channel initialization failed %d", rc);
4959 pthread_mutex_unlock(&mMutex);
4960 goto error_exit;
4961 }
4962 }
4963
4964 if (mRawDumpChannel) {
4965 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4966 if (rc != NO_ERROR) {
4967 LOGE("Error: Raw Dump Channel init failed");
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
4971 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004972 if (mHdrPlusRawSrcChannel) {
4973 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4974 if (rc != NO_ERROR) {
4975 LOGE("Error: HDR+ RAW Source Channel init failed");
4976 pthread_mutex_unlock(&mMutex);
4977 goto error_exit;
4978 }
4979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 if (mSupportChannel) {
4981 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4982 if (rc < 0) {
4983 LOGE("Support channel initialization failed");
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
4988 if (mAnalysisChannel) {
4989 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4990 if (rc < 0) {
4991 LOGE("Analysis channel initialization failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
4995 }
4996 if (mDummyBatchChannel) {
4997 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4998 if (rc < 0) {
4999 LOGE("mDummyBatchChannel setBatchSize failed");
5000 pthread_mutex_unlock(&mMutex);
5001 goto error_exit;
5002 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005003 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (rc < 0) {
5005 LOGE("mDummyBatchChannel initialization failed");
5006 pthread_mutex_unlock(&mMutex);
5007 goto error_exit;
5008 }
5009 }
5010
5011 // Set bundle info
5012 rc = setBundleInfo();
5013 if (rc < 0) {
5014 LOGE("setBundleInfo failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018
5019 //update settings from app here
5020 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5021 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5022 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5023 }
5024 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5025 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5026 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5027 }
5028 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5029 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5030 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5031
5032 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5033 (mLinkedCameraId != mCameraId) ) {
5034 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5035 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 goto error_exit;
5038 }
5039 }
5040
5041 // add bundle related cameras
5042 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5043 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005044 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5045 &m_pDualCamCmdPtr->bundle_info;
5046 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 if (mIsDeviceLinked)
5048 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5049 else
5050 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5051
5052 pthread_mutex_lock(&gCamLock);
5053
5054 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5055 LOGE("Dualcam: Invalid Session Id ");
5056 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 goto error_exit;
5059 }
5060
5061 if (mIsMainCamera == 1) {
5062 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5063 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005064 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005065 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 // related session id should be session id of linked session
5067 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5068 } else {
5069 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5070 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005071 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005072 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5074 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005075 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 pthread_mutex_unlock(&gCamLock);
5077
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005078 rc = mCameraHandle->ops->set_dual_cam_cmd(
5079 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 if (rc < 0) {
5081 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005083 goto error_exit;
5084 }
5085 }
5086
5087 //Then start them.
5088 LOGH("Start META Channel");
5089 rc = mMetadataChannel->start();
5090 if (rc < 0) {
5091 LOGE("META channel start failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095
5096 if (mAnalysisChannel) {
5097 rc = mAnalysisChannel->start();
5098 if (rc < 0) {
5099 LOGE("Analysis channel start failed");
5100 mMetadataChannel->stop();
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105
5106 if (mSupportChannel) {
5107 rc = mSupportChannel->start();
5108 if (rc < 0) {
5109 LOGE("Support channel start failed");
5110 mMetadataChannel->stop();
5111 /* Although support and analysis are mutually exclusive today
5112             adding it in any case for future proofing */
5113 if (mAnalysisChannel) {
5114 mAnalysisChannel->stop();
5115 }
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
5120 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5121 it != mStreamInfo.end(); it++) {
5122 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5123 LOGH("Start Processing Channel mask=%d",
5124 channel->getStreamTypeMask());
5125 rc = channel->start();
5126 if (rc < 0) {
5127 LOGE("channel start failed");
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 if (mRawDumpChannel) {
5134 LOGD("Starting raw dump stream");
5135 rc = mRawDumpChannel->start();
5136 if (rc != NO_ERROR) {
5137 LOGE("Error Starting Raw Dump Channel");
5138 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5139 it != mStreamInfo.end(); it++) {
5140 QCamera3Channel *channel =
5141 (QCamera3Channel *)(*it)->stream->priv;
5142 LOGH("Stopping Processing Channel mask=%d",
5143 channel->getStreamTypeMask());
5144 channel->stop();
5145 }
5146 if (mSupportChannel)
5147 mSupportChannel->stop();
5148 if (mAnalysisChannel) {
5149 mAnalysisChannel->stop();
5150 }
5151 mMetadataChannel->stop();
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156
5157 if (mChannelHandle) {
5158
5159 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5160 mChannelHandle);
5161 if (rc != NO_ERROR) {
5162 LOGE("start_channel failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166 }
5167
5168 goto no_error;
5169error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005170 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 return rc;
5172no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 mWokenUpByDaemon = false;
5174 mPendingLiveRequest = 0;
5175 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 }
5177
Chien-Yu Chenee335912017-02-09 17:53:20 -08005178 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005179 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005180 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5181 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5182 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5183 rc = enableHdrPlusModeLocked();
5184 if (rc != OK) {
5185 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5186 pthread_mutex_unlock(&mMutex);
5187 return rc;
5188 }
5189
5190 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5191 if (mHdrPlusRawSrcChannel) {
5192 rc = mHdrPlusRawSrcChannel->start();
5193 if (rc != OK) {
5194 LOGE("Error Starting HDR+ RAW Channel");
5195 pthread_mutex_unlock(&mMutex);
5196 return rc;
5197 }
5198 }
5199 mFirstPreviewIntentSeen = true;
5200 }
5201
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005203 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204
5205 if (mFlushPerf) {
5206 //we cannot accept any requests during flush
5207 LOGE("process_capture_request cannot proceed during flush");
5208 pthread_mutex_unlock(&mMutex);
5209 return NO_ERROR; //should return an error
5210 }
5211
5212 if (meta.exists(ANDROID_REQUEST_ID)) {
5213 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5214 mCurrentRequestId = request_id;
5215 LOGD("Received request with id: %d", request_id);
5216 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5217 LOGE("Unable to find request id field, \
5218 & no previous id available");
5219 pthread_mutex_unlock(&mMutex);
5220 return NAME_NOT_FOUND;
5221 } else {
5222 LOGD("Re-using old request id");
5223 request_id = mCurrentRequestId;
5224 }
5225
5226 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5227 request->num_output_buffers,
5228 request->input_buffer,
5229 frameNumber);
5230 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005233 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 uint32_t snapshotStreamId = 0;
5235 for (size_t i = 0; i < request->num_output_buffers; i++) {
5236 const camera3_stream_buffer_t& output = request->output_buffers[i];
5237 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5238
Emilian Peev7650c122017-01-19 08:24:33 -08005239 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5240 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005241 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 blob_request = 1;
5243 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5244 }
5245
5246 if (output.acquire_fence != -1) {
5247 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5248 close(output.acquire_fence);
5249 if (rc != OK) {
5250 LOGE("sync wait failed %d", rc);
5251 pthread_mutex_unlock(&mMutex);
5252 return rc;
5253 }
5254 }
5255
Emilian Peev0f3c3162017-03-15 12:57:46 +00005256 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5257 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005258 depthRequestPresent = true;
5259 continue;
5260 }
5261
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005264
5265 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5266 isVidBufRequested = true;
5267 }
5268 }
5269
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005270    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5271 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5272 itr++) {
5273 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5274 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5275 channel->getStreamID(channel->getStreamTypeMask());
5276
5277 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5278 isVidBufRequested = true;
5279 }
5280 }
5281
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005284 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 }
5286 if (blob_request && mRawDumpChannel) {
5287 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005288 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005289 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 }
5292
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293 {
5294 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5295 // Request a RAW buffer if
5296 // 1. mHdrPlusRawSrcChannel is valid.
5297 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5298 // 3. There is no pending HDR+ request.
5299 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5300 mHdrPlusPendingRequests.size() == 0) {
5301 streamsArray.stream_request[streamsArray.num_streams].streamID =
5302 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5303 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5304 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005305 }
5306
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005307 //extract capture intent
5308 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5309 mCaptureIntent =
5310 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5311 }
5312
5313 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5314 mCacMode =
5315 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5316 }
5317
5318 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005319 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005321 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005322 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005323 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5324 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005325 }
5326
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005327 if (hdrPlusRequest) {
5328 // For a HDR+ request, just set the frame parameters.
5329 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5330 if (rc < 0) {
5331 LOGE("fail to set frame parameters");
5332 pthread_mutex_unlock(&mMutex);
5333 return rc;
5334 }
5335 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 /* Parse the settings:
5337 * - For every request in NORMAL MODE
5338 * - For every request in HFR mode during preview only case
5339 * - For first request of every batch in HFR mode during video
5340 * recording. In batchmode the same settings except frame number is
5341 * repeated in each request of the batch.
5342 */
5343 if (!mBatchSize ||
5344 (mBatchSize && !isVidBufRequested) ||
5345 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 if (rc < 0) {
5348 LOGE("fail to set frame parameters");
5349 pthread_mutex_unlock(&mMutex);
5350 return rc;
5351 }
5352 }
5353 /* For batchMode HFR, setFrameParameters is not called for every
5354 * request. But only frame number of the latest request is parsed.
5355 * Keep track of first and last frame numbers in a batch so that
5356 * metadata for the frame numbers of batch can be duplicated in
 5357 * handleBatchMetadata */
5358 if (mBatchSize) {
5359 if (!mToBeQueuedVidBufs) {
5360 //start of the batch
5361 mFirstFrameNumberInBatch = request->frame_number;
5362 }
5363 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5364 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5365 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005366 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 return BAD_VALUE;
5368 }
5369 }
5370 if (mNeedSensorRestart) {
5371 /* Unlock the mutex as restartSensor waits on the channels to be
5372 * stopped, which in turn calls stream callback functions -
5373 * handleBufferWithLock and handleMetadataWithLock */
5374 pthread_mutex_unlock(&mMutex);
5375 rc = dynamicUpdateMetaStreamInfo();
5376 if (rc != NO_ERROR) {
5377 LOGE("Restarting the sensor failed");
5378 return BAD_VALUE;
5379 }
5380 mNeedSensorRestart = false;
5381 pthread_mutex_lock(&mMutex);
5382 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005383 if(mResetInstantAEC) {
5384 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5385 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5386 mResetInstantAEC = false;
5387 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005388 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 if (request->input_buffer->acquire_fence != -1) {
5390 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5391 close(request->input_buffer->acquire_fence);
5392 if (rc != OK) {
5393 LOGE("input buffer sync wait failed %d", rc);
5394 pthread_mutex_unlock(&mMutex);
5395 return rc;
5396 }
5397 }
5398 }
5399
5400 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5401 mLastCustIntentFrmNum = frameNumber;
5402 }
5403 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005404 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 pendingRequestIterator latestRequest;
5406 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005407 pendingRequest.num_buffers = depthRequestPresent ?
5408 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 pendingRequest.request_id = request_id;
5410 pendingRequest.blob_request = blob_request;
5411 pendingRequest.timestamp = 0;
5412 pendingRequest.bUrgentReceived = 0;
5413 if (request->input_buffer) {
5414 pendingRequest.input_buffer =
5415 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5416 *(pendingRequest.input_buffer) = *(request->input_buffer);
5417 pInputBuffer = pendingRequest.input_buffer;
5418 } else {
5419 pendingRequest.input_buffer = NULL;
5420 pInputBuffer = NULL;
5421 }
5422
5423 pendingRequest.pipeline_depth = 0;
5424 pendingRequest.partial_result_cnt = 0;
5425 extractJpegMetadata(mCurJpegMeta, request);
5426 pendingRequest.jpegMetadata = mCurJpegMeta;
5427 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5428 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005430 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5431 mHybridAeEnable =
5432 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5433 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005434
5435 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5436 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005437 /* DevCamDebug metadata processCaptureRequest */
5438 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5439 mDevCamDebugMetaEnable =
5440 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5441 }
5442 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5443 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005444
5445 //extract CAC info
5446 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5447 mCacMode =
5448 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5449 }
5450 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005451 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452
5453 PendingBuffersInRequest bufsForCurRequest;
5454 bufsForCurRequest.frame_number = frameNumber;
5455 // Mark current timestamp for the new request
5456 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005458
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005459 if (hdrPlusRequest) {
5460 // Save settings for this request.
5461 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5462 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5463
5464 // Add to pending HDR+ request queue.
5465 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5466 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5467
5468 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5469 }
5470
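    // Track every requested output buffer for this frame so results can later be
    // matched back to it. Depth BLOB buffers are skipped here; they are mapped to
    // the depth channel further below instead of going through the regular
    // pending-buffer bookkeeping.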
Thierry Strudel3d639192016-09-09 11:52:26 -07005471 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005472 if ((request->output_buffers[i].stream->data_space ==
5473 HAL_DATASPACE_DEPTH) &&
5474 (HAL_PIXEL_FORMAT_BLOB ==
5475 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005476 continue;
5477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005478 RequestedBufferInfo requestedBuf;
5479 memset(&requestedBuf, 0, sizeof(requestedBuf));
5480 requestedBuf.stream = request->output_buffers[i].stream;
5481 requestedBuf.buffer = NULL;
5482 pendingRequest.buffers.push_back(requestedBuf);
5483
5484 // Add to buffer handle the pending buffers list
5485 PendingBufferInfo bufferInfo;
5486 bufferInfo.buffer = request->output_buffers[i].buffer;
5487 bufferInfo.stream = request->output_buffers[i].stream;
5488 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5489 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5490 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5491 frameNumber, bufferInfo.buffer,
5492 channel->getStreamTypeMask(), bufferInfo.stream->format);
5493 }
5494 // Add this request packet into mPendingBuffersMap
5495 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5496 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5497 mPendingBuffersMap.get_num_overall_buffers());
5498
5499 latestRequest = mPendingRequestsList.insert(
5500 mPendingRequestsList.end(), pendingRequest);
5501 if(mFlush) {
5502 LOGI("mFlush is true");
5503 pthread_mutex_unlock(&mMutex);
5504 return NO_ERROR;
5505 }
5506
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005507 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5508 // channel.
5509 if (!hdrPlusRequest) {
5510 int indexUsed;
5511 // Notify metadata channel we receive a request
5512 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005513
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005514 if(request->input_buffer != NULL){
5515 LOGD("Input request, frame_number %d", frameNumber);
5516 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5517 if (NO_ERROR != rc) {
5518 LOGE("fail to set reproc parameters");
5519 pthread_mutex_unlock(&mMutex);
5520 return rc;
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 }
5523
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005524 // Call request on other streams
5525 uint32_t streams_need_metadata = 0;
5526 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5527 for (size_t i = 0; i < request->num_output_buffers; i++) {
5528 const camera3_stream_buffer_t& output = request->output_buffers[i];
5529 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5530
5531 if (channel == NULL) {
5532 LOGW("invalid channel pointer for stream");
5533 continue;
5534 }
5535
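            // BLOB (JPEG) outputs: a reprocess request reuses the framework input
            // buffer with mReprocMeta; a depth BLOB is only mapped onto the depth
            // channel; a regular snapshot is requested on the picture channel and
            // marked as needing HAL metadata (need_metadata) for postprocessing.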
5536 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5537 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5538 output.buffer, request->input_buffer, frameNumber);
5539 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005540 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5542 if (rc < 0) {
5543 LOGE("Fail to request on picture channel");
5544 pthread_mutex_unlock(&mMutex);
5545 return rc;
5546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005548 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5549 assert(NULL != mDepthChannel);
5550 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551
Emilian Peev7650c122017-01-19 08:24:33 -08005552 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5553 if (rc < 0) {
5554 LOGE("Fail to map on depth buffer");
5555 pthread_mutex_unlock(&mMutex);
5556 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 }
Emilian Peev7650c122017-01-19 08:24:33 -08005558 } else {
5559 LOGD("snapshot request with buffer %p, frame_number %d",
5560 output.buffer, frameNumber);
5561 if (!request->settings) {
5562 rc = channel->request(output.buffer, frameNumber,
5563 NULL, mPrevParameters, indexUsed);
5564 } else {
5565 rc = channel->request(output.buffer, frameNumber,
5566 NULL, mParameters, indexUsed);
5567 }
5568 if (rc < 0) {
5569 LOGE("Fail to request on picture channel");
5570 pthread_mutex_unlock(&mMutex);
5571 return rc;
5572 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573
Emilian Peev7650c122017-01-19 08:24:33 -08005574 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5575 uint32_t j = 0;
5576 for (j = 0; j < streamsArray.num_streams; j++) {
5577 if (streamsArray.stream_request[j].streamID == streamId) {
5578 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5579 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5580 else
5581 streamsArray.stream_request[j].buf_index = indexUsed;
5582 break;
5583 }
5584 }
5585 if (j == streamsArray.num_streams) {
5586 LOGE("Did not find matching stream to update index");
5587 assert(0);
5588 }
5589
5590 pendingBufferIter->need_metadata = true;
5591 streams_need_metadata++;
5592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005593 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005594 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5595 bool needMetadata = false;
5596 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5597 rc = yuvChannel->request(output.buffer, frameNumber,
5598 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5599 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 pthread_mutex_unlock(&mMutex);
5603 return rc;
5604 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005605
5606 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5607 uint32_t j = 0;
5608 for (j = 0; j < streamsArray.num_streams; j++) {
5609 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5611 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5612 else
5613 streamsArray.stream_request[j].buf_index = indexUsed;
5614 break;
5615 }
5616 }
5617 if (j == streamsArray.num_streams) {
5618 LOGE("Did not find matching stream to update index");
5619 assert(0);
5620 }
5621
5622 pendingBufferIter->need_metadata = needMetadata;
5623 if (needMetadata)
5624 streams_need_metadata += 1;
5625 LOGD("calling YUV channel request, need_metadata is %d",
5626 needMetadata);
5627 } else {
5628 LOGD("request with buffer %p, frame_number %d",
5629 output.buffer, frameNumber);
5630
5631 rc = channel->request(output.buffer, frameNumber, indexUsed);
5632
5633 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5634 uint32_t j = 0;
5635 for (j = 0; j < streamsArray.num_streams; j++) {
5636 if (streamsArray.stream_request[j].streamID == streamId) {
5637 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5638 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5639 else
5640 streamsArray.stream_request[j].buf_index = indexUsed;
5641 break;
5642 }
5643 }
5644 if (j == streamsArray.num_streams) {
5645 LOGE("Did not find matching stream to update index");
5646 assert(0);
5647 }
5648
5649 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5650 && mBatchSize) {
5651 mToBeQueuedVidBufs++;
5652 if (mToBeQueuedVidBufs == mBatchSize) {
5653 channel->queueBatchBuf();
5654 }
5655 }
5656 if (rc < 0) {
5657 LOGE("request failed");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
5660 }
5661 }
5662 pendingBufferIter++;
5663 }
5664
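        // Internally requested streams carry no framework buffer: the HAL issues
        // them on the corresponding channel (optionally metering-only) and records
        // them in the pending request's internalRequestList.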
5665 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5666 itr++) {
5667 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5668
5669 if (channel == NULL) {
5670 LOGE("invalid channel pointer for stream");
5671 assert(0);
5672 return BAD_VALUE;
5673 }
5674
5675 InternalRequest requestedStream;
5676 requestedStream = (*itr);
5677
5678
5679 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5680 LOGD("snapshot request internally input buffer %p, frame_number %d",
5681 request->input_buffer, frameNumber);
5682 if(request->input_buffer != NULL){
5683 rc = channel->request(NULL, frameNumber,
5684 pInputBuffer, &mReprocMeta, indexUsed, true,
5685 requestedStream.meteringOnly);
5686 if (rc < 0) {
5687 LOGE("Fail to request on picture channel");
5688 pthread_mutex_unlock(&mMutex);
5689 return rc;
5690 }
5691 } else {
5692 LOGD("snapshot request with frame_number %d", frameNumber);
5693 if (!request->settings) {
5694 rc = channel->request(NULL, frameNumber,
5695 NULL, mPrevParameters, indexUsed, true,
5696 requestedStream.meteringOnly);
5697 } else {
5698 rc = channel->request(NULL, frameNumber,
5699 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5700 }
5701 if (rc < 0) {
5702 LOGE("Fail to request on picture channel");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
5706
5707 if ((*itr).meteringOnly != 1) {
5708 requestedStream.need_metadata = 1;
5709 streams_need_metadata++;
5710 }
5711 }
5712
5713 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5714 uint32_t j = 0;
5715 for (j = 0; j < streamsArray.num_streams; j++) {
5716 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005717 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5718 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5719 else
5720 streamsArray.stream_request[j].buf_index = indexUsed;
5721 break;
5722 }
5723 }
5724 if (j == streamsArray.num_streams) {
5725 LOGE("Did not find matching stream to update index");
5726 assert(0);
5727 }
5728
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005729 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005731 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005732 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005734 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005735 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005736
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005737 //If 2 streams have need_metadata set to true, fail the request, unless
5738 //we copy/reference count the metadata buffer
5739 if (streams_need_metadata > 1) {
 5740            LOGE("not supporting a request in which two streams require"
 5741                    " two HAL metadata buffers for reprocessing");
5742 pthread_mutex_unlock(&mMutex);
5743 return -EINVAL;
5744 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005745
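        // PDAF data is only enabled in the backend parameters when this request
        // includes a depth stream; the merged stream list and parameters are then
        // pushed to the backend once per request (or once per HFR batch) below.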
Emilian Peev7650c122017-01-19 08:24:33 -08005746 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5748 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5749 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5750 pthread_mutex_unlock(&mMutex);
5751 return BAD_VALUE;
5752 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 if (request->input_buffer == NULL) {
5754 /* Set the parameters to backend:
5755 * - For every request in NORMAL MODE
5756 * - For every request in HFR mode during preview only case
5757 * - Once every batch in HFR mode during video recording
5758 */
5759 if (!mBatchSize ||
5760 (mBatchSize && !isVidBufRequested) ||
5761 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5762 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5763 mBatchSize, isVidBufRequested,
5764 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765
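            // When the last request of an HFR batch arrives, merge the stream IDs
            // requested across the whole batch (deduplicated by streamID) into one
            // array so a single set_parms call covers every stream in the batch.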
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005766 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5767 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5768 uint32_t m = 0;
5769 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5770 if (streamsArray.stream_request[k].streamID ==
5771 mBatchedStreamsArray.stream_request[m].streamID)
5772 break;
5773 }
5774 if (m == mBatchedStreamsArray.num_streams) {
5775 mBatchedStreamsArray.stream_request\
5776 [mBatchedStreamsArray.num_streams].streamID =
5777 streamsArray.stream_request[k].streamID;
5778 mBatchedStreamsArray.stream_request\
5779 [mBatchedStreamsArray.num_streams].buf_index =
5780 streamsArray.stream_request[k].buf_index;
5781 mBatchedStreamsArray.num_streams =
5782 mBatchedStreamsArray.num_streams + 1;
5783 }
5784 }
5785 streamsArray = mBatchedStreamsArray;
5786 }
5787 /* Update stream id of all the requested buffers */
5788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5789 streamsArray)) {
5790 LOGE("Failed to set stream type mask in the parameters");
5791 return BAD_VALUE;
5792 }
5793
5794 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5795 mParameters);
5796 if (rc < 0) {
5797 LOGE("set_parms failed");
5798 }
5799 /* reset to zero coz, the batch is queued */
5800 mToBeQueuedVidBufs = 0;
5801 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5802 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5803 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005804 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5805 uint32_t m = 0;
5806 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5807 if (streamsArray.stream_request[k].streamID ==
5808 mBatchedStreamsArray.stream_request[m].streamID)
5809 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005810 }
5811 if (m == mBatchedStreamsArray.num_streams) {
5812 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5813 streamID = streamsArray.stream_request[k].streamID;
5814 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5815 buf_index = streamsArray.stream_request[k].buf_index;
5816 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5817 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005818 }
5819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005821 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005822 }
5823
5824 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5825
5826 mState = STARTED;
5827 // Added a timed condition wait
5828 struct timespec ts;
5829 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005830 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005831 if (rc < 0) {
5832 isValidTimeout = 0;
5833 LOGE("Error reading the real time clock!!");
5834 }
5835 else {
 5836        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 int64_t timeout = 5;
5838 {
5839 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5840 // If there is a pending HDR+ request, the following requests may be blocked until the
5841 // HDR+ request is done. So allow a longer timeout.
5842 if (mHdrPlusPendingRequests.size() > 0) {
5843 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5844 }
5845 }
5846 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005847 }
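    // Throttle the framework here: block until the number of in-flight requests
    // drops below mMinInFlightRequests (input-buffer requests are not throttled).
    // The wait is timed (normally 5 seconds, longer when an HDR+ request is
    // pending) so a stalled pipeline surfaces as -ENODEV instead of hanging.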
5848 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005849 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005850 (mState != ERROR) && (mState != DEINIT)) {
5851 if (!isValidTimeout) {
5852 LOGD("Blocking on conditional wait");
5853 pthread_cond_wait(&mRequestCond, &mMutex);
5854 }
5855 else {
5856 LOGD("Blocking on timed conditional wait");
5857 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5858 if (rc == ETIMEDOUT) {
5859 rc = -ENODEV;
5860 LOGE("Unblocked on timeout!!!!");
5861 break;
5862 }
5863 }
5864 LOGD("Unblocked");
5865 if (mWokenUpByDaemon) {
5866 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005867 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005868 break;
5869 }
5870 }
5871 pthread_mutex_unlock(&mMutex);
5872
5873 return rc;
5874}
5875
5876/*===========================================================================
5877 * FUNCTION : dump
5878 *
 5879 * DESCRIPTION: Dumps pending requests, pending buffers and pending frame drops
 *              maintained by the HAL to the given file descriptor
5880 *
5881 * PARAMETERS :
 5882 *   @fd : file descriptor to write the dump to
5883 *
5884 * RETURN :
5885 *==========================================================================*/
5886void QCamera3HardwareInterface::dump(int fd)
5887{
5888 pthread_mutex_lock(&mMutex);
5889 dprintf(fd, "\n Camera HAL3 information Begin \n");
5890
5891 dprintf(fd, "\nNumber of pending requests: %zu \n",
5892 mPendingRequestsList.size());
5893 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5894 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5895 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5896 for(pendingRequestIterator i = mPendingRequestsList.begin();
5897 i != mPendingRequestsList.end(); i++) {
5898 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5899 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5900 i->input_buffer);
5901 }
5902 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5903 mPendingBuffersMap.get_num_overall_buffers());
5904 dprintf(fd, "-------+------------------\n");
5905 dprintf(fd, " Frame | Stream type mask \n");
5906 dprintf(fd, "-------+------------------\n");
5907 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5908 for(auto &j : req.mPendingBufferList) {
5909 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5910 dprintf(fd, " %5d | %11d \n",
5911 req.frame_number, channel->getStreamTypeMask());
5912 }
5913 }
5914 dprintf(fd, "-------+------------------\n");
5915
5916 dprintf(fd, "\nPending frame drop list: %zu\n",
5917 mPendingFrameDropList.size());
5918 dprintf(fd, "-------+-----------\n");
5919 dprintf(fd, " Frame | Stream ID \n");
5920 dprintf(fd, "-------+-----------\n");
5921 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5922 i != mPendingFrameDropList.end(); i++) {
5923 dprintf(fd, " %5d | %9d \n",
5924 i->frame_number, i->stream_ID);
5925 }
5926 dprintf(fd, "-------+-----------\n");
5927
5928 dprintf(fd, "\n Camera HAL3 information End \n");
5929
5930 /* use dumpsys media.camera as trigger to send update debug level event */
5931 mUpdateDebugLevel = true;
5932 pthread_mutex_unlock(&mMutex);
5933 return;
5934}
5935
5936/*===========================================================================
5937 * FUNCTION : flush
5938 *
5939 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5940 * conditionally restarts channels
5941 *
5942 * PARAMETERS :
5943 * @ restartChannels: re-start all channels
5944 *
5945 *
5946 * RETURN :
5947 * 0 on success
5948 * Error code on failure
5949 *==========================================================================*/
5950int QCamera3HardwareInterface::flush(bool restartChannels)
5951{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005952 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005953 int32_t rc = NO_ERROR;
5954
5955 LOGD("Unblocking Process Capture Request");
5956 pthread_mutex_lock(&mMutex);
5957 mFlush = true;
5958 pthread_mutex_unlock(&mMutex);
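    // mFlush is read under mMutex by processCaptureRequest(), which returns early
    // for any request accepted while the flush is in progress; it is cleared again
    // after the pending requests have been errored out below.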
5959
5960 rc = stopAllChannels();
5961 // unlink of dualcam
5962 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005963 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5964 &m_pDualCamCmdPtr->bundle_info;
5965 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005966 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5967 pthread_mutex_lock(&gCamLock);
5968
5969 if (mIsMainCamera == 1) {
5970 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5971 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005972 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005973 // related session id should be session id of linked session
5974 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5975 } else {
5976 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5977 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005978 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005979 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5980 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005981 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005982 pthread_mutex_unlock(&gCamLock);
5983
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005984 rc = mCameraHandle->ops->set_dual_cam_cmd(
5985 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005986 if (rc < 0) {
5987 LOGE("Dualcam: Unlink failed, but still proceed to close");
5988 }
5989 }
5990
5991 if (rc < 0) {
5992 LOGE("stopAllChannels failed");
5993 return rc;
5994 }
5995 if (mChannelHandle) {
5996 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5997 mChannelHandle);
5998 }
5999
6000 // Reset bundle info
6001 rc = setBundleInfo();
6002 if (rc < 0) {
6003 LOGE("setBundleInfo failed %d", rc);
6004 return rc;
6005 }
6006
6007 // Mutex Lock
6008 pthread_mutex_lock(&mMutex);
6009
6010 // Unblock process_capture_request
6011 mPendingLiveRequest = 0;
6012 pthread_cond_signal(&mRequestCond);
6013
6014 rc = notifyErrorForPendingRequests();
6015 if (rc < 0) {
6016 LOGE("notifyErrorForPendingRequests failed");
6017 pthread_mutex_unlock(&mMutex);
6018 return rc;
6019 }
6020
6021 mFlush = false;
6022
6023 // Start the Streams/Channels
6024 if (restartChannels) {
6025 rc = startAllChannels();
6026 if (rc < 0) {
6027 LOGE("startAllChannels failed");
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006031 if (mChannelHandle) {
6032 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6033 mChannelHandle);
6034 if (rc < 0) {
6035 LOGE("start_channel failed");
6036 pthread_mutex_unlock(&mMutex);
6037 return rc;
6038 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 }
6040 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 pthread_mutex_unlock(&mMutex);
6042
6043 return 0;
6044}
6045
6046/*===========================================================================
6047 * FUNCTION : flushPerf
6048 *
 6049 * DESCRIPTION: This is the performance-optimized version of flush that does
 6050 *              not use stream off; instead it flushes the system
6051 *
6052 * PARAMETERS :
6053 *
6054 *
6055 * RETURN : 0 : success
6056 * -EINVAL: input is malformed (device is not valid)
6057 * -ENODEV: if the device has encountered a serious error
6058 *==========================================================================*/
6059int QCamera3HardwareInterface::flushPerf()
6060{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006061 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 int32_t rc = 0;
6063 struct timespec timeout;
6064 bool timed_wait = false;
6065
6066 pthread_mutex_lock(&mMutex);
6067 mFlushPerf = true;
6068 mPendingBuffersMap.numPendingBufsAtFlush =
6069 mPendingBuffersMap.get_num_overall_buffers();
6070 LOGD("Calling flush. Wait for %d buffers to return",
6071 mPendingBuffersMap.numPendingBufsAtFlush);
6072
6073 /* send the flush event to the backend */
6074 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6075 if (rc < 0) {
6076 LOGE("Error in flush: IOCTL failure");
6077 mFlushPerf = false;
6078 pthread_mutex_unlock(&mMutex);
6079 return -ENODEV;
6080 }
6081
6082 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6083 LOGD("No pending buffers in HAL, return flush");
6084 mFlushPerf = false;
6085 pthread_mutex_unlock(&mMutex);
6086 return rc;
6087 }
6088
6089 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006090 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006091 if (rc < 0) {
6092 LOGE("Error reading the real time clock, cannot use timed wait");
6093 } else {
6094 timeout.tv_sec += FLUSH_TIMEOUT;
6095 timed_wait = true;
6096 }
6097
6098 //Block on conditional variable
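    // Wait for every buffer that was pending at the time of the flush to be
    // returned by the backend, using the FLUSH_TIMEOUT bound computed above when a
    // timed wait is possible.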
6099 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6100 LOGD("Waiting on mBuffersCond");
6101 if (!timed_wait) {
6102 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6103 if (rc != 0) {
6104 LOGE("pthread_cond_wait failed due to rc = %s",
6105 strerror(rc));
6106 break;
6107 }
6108 } else {
6109 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6110 if (rc != 0) {
6111 LOGE("pthread_cond_timedwait failed due to rc = %s",
6112 strerror(rc));
6113 break;
6114 }
6115 }
6116 }
6117 if (rc != 0) {
6118 mFlushPerf = false;
6119 pthread_mutex_unlock(&mMutex);
6120 return -ENODEV;
6121 }
6122
6123 LOGD("Received buffers, now safe to return them");
6124
6125 //make sure the channels handle flush
6126 //currently only required for the picture channel to release snapshot resources
6127 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6128 it != mStreamInfo.end(); it++) {
6129 QCamera3Channel *channel = (*it)->channel;
6130 if (channel) {
6131 rc = channel->flush();
6132 if (rc) {
6133 LOGE("Flushing the channels failed with error %d", rc);
 6134            // Even though the channel flush failed, continue and return the
 6135            // buffers we have to the framework; the overall return value will
 6136            // be an error (-ENODEV)
6137 rc = -ENODEV;
6138 }
6139 }
6140 }
6141
6142 /* notify the frameworks and send errored results */
6143 rc = notifyErrorForPendingRequests();
6144 if (rc < 0) {
6145 LOGE("notifyErrorForPendingRequests failed");
6146 pthread_mutex_unlock(&mMutex);
6147 return rc;
6148 }
6149
6150 //unblock process_capture_request
6151 mPendingLiveRequest = 0;
6152 unblockRequestIfNecessary();
6153
6154 mFlushPerf = false;
6155 pthread_mutex_unlock(&mMutex);
6156 LOGD ("Flush Operation complete. rc = %d", rc);
6157 return rc;
6158}
6159
6160/*===========================================================================
6161 * FUNCTION : handleCameraDeviceError
6162 *
 6163 * DESCRIPTION: This function performs an internal flush, notifies the framework
 6164 *              of the error and updates the state variable.
6165 *
6166 * PARAMETERS : None
6167 *
6168 * RETURN : NO_ERROR on Success
6169 * Error code on failure
6170 *==========================================================================*/
6171int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6172{
6173 int32_t rc = NO_ERROR;
6174
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006175 {
6176 Mutex::Autolock lock(mFlushLock);
6177 pthread_mutex_lock(&mMutex);
6178 if (mState != ERROR) {
6179 //if mState != ERROR, nothing to be done
6180 pthread_mutex_unlock(&mMutex);
6181 return NO_ERROR;
6182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006184
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006185 rc = flush(false /* restart channels */);
6186 if (NO_ERROR != rc) {
6187 LOGE("internal flush to handle mState = ERROR failed");
6188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006190 pthread_mutex_lock(&mMutex);
6191 mState = DEINIT;
6192 pthread_mutex_unlock(&mMutex);
6193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006194
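    // With the internal flush done and mState set to DEINIT, notify the framework
    // of the fatal device error (frame_number 0, no stream) so the framework can
    // tear the device down.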
6195 camera3_notify_msg_t notify_msg;
6196 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6197 notify_msg.type = CAMERA3_MSG_ERROR;
6198 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6199 notify_msg.message.error.error_stream = NULL;
6200 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006201 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006202
6203 return rc;
6204}
6205
6206/*===========================================================================
6207 * FUNCTION : captureResultCb
6208 *
6209 * DESCRIPTION: Callback handler for all capture result
6210 * (streams, as well as metadata)
6211 *
6212 * PARAMETERS :
6213 * @metadata : metadata information
6214 * @buffer : actual gralloc buffer to be returned to frameworks.
6215 * NULL if metadata.
6216 *
6217 * RETURN : NONE
6218 *==========================================================================*/
6219void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6220 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6221{
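    // Metadata from the backend goes through the batch handler when HFR batching is
    // active and is otherwise translated directly under mMutex; input-buffer and
    // output-buffer completions are dispatched to their dedicated handlers.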
6222 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006223 pthread_mutex_lock(&mMutex);
6224 uint8_t batchSize = mBatchSize;
6225 pthread_mutex_unlock(&mMutex);
6226 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 handleBatchMetadata(metadata_buf,
6228 true /* free_and_bufdone_meta_buf */);
6229 } else { /* mBatchSize = 0 */
6230 hdrPlusPerfLock(metadata_buf);
6231 pthread_mutex_lock(&mMutex);
6232 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006233 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006234 true /* last urgent frame of batch metadata */,
6235 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006236 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 pthread_mutex_unlock(&mMutex);
6238 }
6239 } else if (isInputBuffer) {
6240 pthread_mutex_lock(&mMutex);
6241 handleInputBufferWithLock(frame_number);
6242 pthread_mutex_unlock(&mMutex);
6243 } else {
6244 pthread_mutex_lock(&mMutex);
6245 handleBufferWithLock(buffer, frame_number);
6246 pthread_mutex_unlock(&mMutex);
6247 }
6248 return;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION : getReprocessibleOutputStreamId
6253 *
6254 * DESCRIPTION: Get source output stream id for the input reprocess stream
6255 * based on size and format, which would be the largest
6256 * output stream if an input stream exists.
6257 *
6258 * PARAMETERS :
6259 * @id : return the stream id if found
6260 *
6261 * RETURN : int32_t type of status
6262 * NO_ERROR -- success
 6263 *              non-zero failure code
6264 *==========================================================================*/
6265int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6266{
 6267    /* Check if there is any output or bidirectional stream with the same size
 6268       and format as the input stream, and return that stream's ID */
6269 if ((mInputStreamInfo.dim.width > 0) &&
6270 (mInputStreamInfo.dim.height > 0)) {
6271 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6272 it != mStreamInfo.end(); it++) {
6273
6274 camera3_stream_t *stream = (*it)->stream;
6275 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6276 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6277 (stream->format == mInputStreamInfo.format)) {
6278 // Usage flag for an input stream and the source output stream
6279 // may be different.
6280 LOGD("Found reprocessible output stream! %p", *it);
6281 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6282 stream->usage, mInputStreamInfo.usage);
6283
6284 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6285 if (channel != NULL && channel->mStreams[0]) {
6286 id = channel->mStreams[0]->getMyServerID();
6287 return NO_ERROR;
6288 }
6289 }
6290 }
6291 } else {
6292 LOGD("No input stream, so no reprocessible output stream");
6293 }
6294 return NAME_NOT_FOUND;
6295}
6296
6297/*===========================================================================
6298 * FUNCTION : lookupFwkName
6299 *
 6300 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 6301 *              make sure the parameter is correctly propagated
6302 *
6303 * PARAMETERS :
6304 * @arr : map between the two enums
6305 * @len : len of the map
6306 * @hal_name : name of the hal_parm to map
6307 *
6308 * RETURN : int type of status
6309 * fwk_name -- success
 6310 *              non-zero failure code
6311 *==========================================================================*/
6312template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6313 size_t len, halType hal_name)
6314{
6315
6316 for (size_t i = 0; i < len; i++) {
6317 if (arr[i].hal_name == hal_name) {
6318 return arr[i].fwk_name;
6319 }
6320 }
6321
 6322    /* Not finding a matching framework type is not necessarily an error.
 6323     * This happens when mm-camera supports more attributes than the
 6324     * framework does */
6325 LOGH("Cannot find matching framework type");
6326 return NAME_NOT_FOUND;
6327}
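// Usage sketch for lookupFwkName() above (hypothetical map and values): given a map
// such as { {CAM_EFFECT_MODE_MONO, ANDROID_CONTROL_EFFECT_MODE_MONO}, ... },
// lookupFwkName(map, mapLen, CAM_EFFECT_MODE_MONO) would return
// ANDROID_CONTROL_EFFECT_MODE_MONO, while an unmapped HAL value returns NAME_NOT_FOUND.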
6328
6329/*===========================================================================
6330 * FUNCTION : lookupHalName
6331 *
 6332 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 6333 *              make sure the parameter is correctly propagated
6334 *
6335 * PARAMETERS :
6336 * @arr : map between the two enums
6337 * @len : len of the map
 6338 *   @fwk_name : name of the fwk parameter to map
6339 *
6340 * RETURN : int32_t type of status
6341 * hal_name -- success
 6342 *              non-zero failure code
6343 *==========================================================================*/
6344template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6345 size_t len, fwkType fwk_name)
6346{
6347 for (size_t i = 0; i < len; i++) {
6348 if (arr[i].fwk_name == fwk_name) {
6349 return arr[i].hal_name;
6350 }
6351 }
6352
6353 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6354 return NAME_NOT_FOUND;
6355}
6356
6357/*===========================================================================
6358 * FUNCTION : lookupProp
6359 *
6360 * DESCRIPTION: lookup a value by its name
6361 *
6362 * PARAMETERS :
6363 * @arr : map between the two enums
6364 * @len : size of the map
6365 * @name : name to be looked up
6366 *
6367 * RETURN : Value if found
6368 * CAM_CDS_MODE_MAX if not found
6369 *==========================================================================*/
6370template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6371 size_t len, const char *name)
6372{
6373 if (name) {
6374 for (size_t i = 0; i < len; i++) {
6375 if (!strcmp(arr[i].desc, name)) {
6376 return arr[i].val;
6377 }
6378 }
6379 }
6380 return CAM_CDS_MODE_MAX;
6381}
6382
6383/*===========================================================================
 6384 * FUNCTION   : translateFromHalMetadata
 6385 * DESCRIPTION: Translates metadata received from the HAL backend into the
 *              camera_metadata_t format expected by the framework
6386 *
6387 * PARAMETERS :
6388 * @metadata : metadata information from callback
6389 * @timestamp: metadata buffer timestamp
6390 * @request_id: request id
6391 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006392 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006393 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6394 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006396 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6397 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006398 *
6399 * RETURN : camera_metadata_t*
6400 * metadata in a format specified by fwk
6401 *==========================================================================*/
6402camera_metadata_t*
6403QCamera3HardwareInterface::translateFromHalMetadata(
6404 metadata_buffer_t *metadata,
6405 nsecs_t timestamp,
6406 int32_t request_id,
6407 const CameraMetadata& jpegMetadata,
6408 uint8_t pipeline_depth,
6409 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006410 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006411 /* DevCamDebug metadata translateFromHalMetadata argument */
6412 uint8_t DevCamDebug_meta_enable,
6413 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006415 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006416 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006417{
6418 CameraMetadata camMetadata;
6419 camera_metadata_t *resultMetadata;
6420
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006421 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006422 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6423 * Timestamp is needed because it's used for shutter notify calculation.
6424 * */
6425 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6426 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006427 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006428 }
6429
Thierry Strudel3d639192016-09-09 11:52:26 -07006430 if (jpegMetadata.entryCount())
6431 camMetadata.append(jpegMetadata);
6432
6433 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6434 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6435 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6436 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006437 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006438 if (mBatchSize == 0) {
6439 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6440 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6441 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006442
Samuel Ha68ba5172016-12-15 18:41:12 -08006443 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6444    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6445 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6446 // DevCamDebug metadata translateFromHalMetadata AF
6447 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6448 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6449 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6450 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6451 }
6452 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6453 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6454 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6455 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6456 }
6457 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6458 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6459 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6460 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6461 }
6462 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6463 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6464 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6465 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6466 }
6467 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6468 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6469 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6470 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6471 }
6472 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6473 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6474 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6475 *DevCamDebug_af_monitor_pdaf_target_pos;
6476 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6477 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6478 }
6479 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6480 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6481 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6482 *DevCamDebug_af_monitor_pdaf_confidence;
6483 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6484 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6485 }
6486 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6487 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6488 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6489 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6490 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6491 }
6492 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6493 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6494 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6495 *DevCamDebug_af_monitor_tof_target_pos;
6496 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6497 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6498 }
6499 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6500 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6501 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6502 *DevCamDebug_af_monitor_tof_confidence;
6503 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6504 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6505 }
6506 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6507 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6508 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6509 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6510 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6513 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6514 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6515 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6516 &fwk_DevCamDebug_af_monitor_type_select, 1);
6517 }
6518 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6519 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6520 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6521 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6522 &fwk_DevCamDebug_af_monitor_refocus, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6525 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6526 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6527 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6528 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6529 }
6530 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6531 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6532 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6533 *DevCamDebug_af_search_pdaf_target_pos;
6534 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6535 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6538 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6540 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6541 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6544 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6545 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6546 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6547 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6548 }
6549 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6550 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6551 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6552 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6553 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6554 }
6555 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6556 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6557 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6558 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6559 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6562 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6563 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6564 *DevCamDebug_af_search_tof_target_pos;
6565 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6566 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6567 }
6568 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6569 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6570 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6572 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6575 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6576 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6577 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6578 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6579 }
6580 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6581 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6582 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6583 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6584 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6585 }
6586 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6587 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6588 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6589 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6590 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6593 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6594 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6595 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6596 &fwk_DevCamDebug_af_search_type_select, 1);
6597 }
6598 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6599 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6600 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6601 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6602 &fwk_DevCamDebug_af_search_next_pos, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6605 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6606 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6607 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6608 &fwk_DevCamDebug_af_search_target_pos, 1);
6609 }
6610 // DevCamDebug metadata translateFromHalMetadata AEC
6611 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6612 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6613 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6614 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6617 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6618 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6619 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6622 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6623 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6624 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6627 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6628 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6629 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6632 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6633 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6634 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6635 }
6636 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6637 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6638 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6639 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6642 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6643 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6644 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6645 }
6646 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6647 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6648 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6649 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6650 }
Samuel Ha34229982017-02-17 13:51:11 -08006651 // DevCamDebug metadata translateFromHalMetadata zzHDR
6652 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6653 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6654 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6655 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6658 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6659 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6660 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6661 }
6662 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6663 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6664 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6665 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6668 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6669 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6670 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6671 }
6672 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6673 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6674 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6675 *DevCamDebug_aec_hdr_sensitivity_ratio;
6676 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6677 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6678 }
6679 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6680 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6681 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6682 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6683 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6684 }
6685 // DevCamDebug metadata translateFromHalMetadata ADRC
6686 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6687 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6688 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6689 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6690 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6691 }
6692 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6693 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6694 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6695 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6696 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6697 }
6698 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6699 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6700 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6701 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6702 }
6703 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6704 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6705 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6706 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6707 }
6708 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6709 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6710 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6711 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6712 }
6713 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6714 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6715 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6716 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6717 }
6718 // DevCamDebug metadata translateFromHalMetadata AWB
6719 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6720 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6721 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6722 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6723 }
6724 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6725 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6726 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6727 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6728 }
6729 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6730 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6731 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6732 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6733 }
6734 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6735 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6736 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6737 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6740 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6741 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6742 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6743 }
6744 }
6745 // atrace_end(ATRACE_TAG_ALWAYS);
6746
6747 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6748 int64_t fwk_frame_number = *frame_number;
6749 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6750 }
6751
6752 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6753 int32_t fps_range[2];
6754 fps_range[0] = (int32_t)float_range->min_fps;
6755 fps_range[1] = (int32_t)float_range->max_fps;
6756 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6757 fps_range, 2);
6758 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6759 fps_range[0], fps_range[1]);
6760 }
6761
6762 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6763 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6764 }
6765
6766 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6767 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6768 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6769 *sceneMode);
6770 if (NAME_NOT_FOUND != val) {
6771 uint8_t fwkSceneMode = (uint8_t)val;
6772 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6773 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6774 fwkSceneMode);
6775 }
6776 }
6777
6778 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6779 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6780 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6781 }
6782
6783 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6784 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6785 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6786 }
6787
6788 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6789 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6790 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6791 }
6792
6793 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6794 CAM_INTF_META_EDGE_MODE, metadata) {
6795 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6796 }
6797
6798 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6799 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6800 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6801 }
6802
6803 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6804 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6805 }
6806
6807 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6808 if (0 <= *flashState) {
6809 uint8_t fwk_flashState = (uint8_t) *flashState;
6810 if (!gCamCapability[mCameraId]->flash_available) {
6811 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6812 }
6813 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6814 }
6815 }
6816
6817 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6818 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6819 if (NAME_NOT_FOUND != val) {
6820 uint8_t fwk_flashMode = (uint8_t)val;
6821 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6822 }
6823 }
6824
6825 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6826 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6827 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6828 }
6829
6830 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6831 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6832 }
6833
6834 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6835 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6836 }
6837
6838 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6839 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6840 }
6841
6842 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6843 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6844 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6845 }
6846
6847 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6848 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6849 LOGD("fwk_videoStab = %d", fwk_videoStab);
6850 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6851 } else {
6852 // Regardless of whether video stabilization is supported, CTS expects the EIS
6853 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6854 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6855 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
6856 LOGD("EIS result default to OFF mode");
6857 }
6858
6859 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6860 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6861 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6862 }
6863
6864 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6865 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6866 }
6867
6868 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6869 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
6870 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
6871
6872 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6873 gCamCapability[mCameraId]->color_arrangement);
6874
6875 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
6876 blackLevelAppliedPattern->cam_black_level[0],
6877 blackLevelAppliedPattern->cam_black_level[1],
6878 blackLevelAppliedPattern->cam_black_level[2],
6879 blackLevelAppliedPattern->cam_black_level[3]);
6880 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6881 BLACK_LEVEL_PATTERN_CNT);
6882
6883#ifndef USE_HAL_3_3
6884 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6885 // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6886 // depth space.
6887 fwk_blackLevelInd[0] /= 4.0;
6888 fwk_blackLevelInd[1] /= 4.0;
6889 fwk_blackLevelInd[2] /= 4.0;
6890 fwk_blackLevelInd[3] /= 4.0;
6891 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6892 BLACK_LEVEL_PATTERN_CNT);
6893#endif
6894 }
6895
6896#ifndef USE_HAL_3_3
6897 // Fixed whitelevel is used by ISP/Sensor
6898 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6899 &gCamCapability[mCameraId]->white_level, 1);
6900#endif
6901
6902 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6903 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6904 int32_t scalerCropRegion[4];
6905 scalerCropRegion[0] = hScalerCropRegion->left;
6906 scalerCropRegion[1] = hScalerCropRegion->top;
6907 scalerCropRegion[2] = hScalerCropRegion->width;
6908 scalerCropRegion[3] = hScalerCropRegion->height;
6909
6910 // Adjust crop region from sensor output coordinate system to active
6911 // array coordinate system.
6912 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6913 scalerCropRegion[2], scalerCropRegion[3]);
6914
6915 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6916 }
6917
6918 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6919 LOGD("sensorExpTime = %lld", *sensorExpTime);
6920 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6921 }
6922
6923 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6924 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6925 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6926 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6927 }
6928
6929 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6930 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6931 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6932 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6933 sensorRollingShutterSkew, 1);
6934 }
6935
6936 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6937 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6938 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6939
6940 //calculate the noise profile based on sensitivity
6941 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6942 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6943 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
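// Each color channel gets an (S, O) pair; the framework's ANDROID_SENSOR_NOISE_PROFILE
// models pixel noise variance as approximately S * signal + O.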
6944 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6945 noise_profile[i] = noise_profile_S;
6946 noise_profile[i+1] = noise_profile_O;
6947 }
6948 LOGD("noise model entry (S, O) is (%f, %f)",
6949 noise_profile_S, noise_profile_O);
6950 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6951 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6952 }
6953
6954#ifndef USE_HAL_3_3
6955 int32_t fwk_ispSensitivity = 100;
6956 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
6957 fwk_ispSensitivity = (int32_t) *ispSensitivity;
6958 }
6959 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6960 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6961 }
6962 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
6963#endif
6964
6965 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6966 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6967 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6968 }
6969
6970 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6971 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6972 *faceDetectMode);
6973 if (NAME_NOT_FOUND != val) {
6974 uint8_t fwk_faceDetectMode = (uint8_t)val;
6975 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6976
6977 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6978 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6979 CAM_INTF_META_FACE_DETECTION, metadata) {
6980 uint8_t numFaces = MIN(
6981 faceDetectionInfo->num_faces_detected, MAX_ROI);
6982 int32_t faceIds[MAX_ROI];
6983 uint8_t faceScores[MAX_ROI];
6984 int32_t faceRectangles[MAX_ROI * 4];
6985 int32_t faceLandmarks[MAX_ROI * 6];
6986 size_t j = 0, k = 0;
6987
6988 for (size_t i = 0; i < numFaces; i++) {
6989 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6990 // Map the face boundary from the sensor output coordinate system to the
6991 // active array coordinate system.
6992 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6993 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6994 rect.width, rect.height);
6995
6996 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6997 faceRectangles+j, -1);
6998
6999 j+= 4;
7000 }
7001 if (numFaces <= 0) {
7002 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7003 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7004 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7005 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7006 }
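// When no faces are detected, the arrays are zero-filled so stale stack data is never reported.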
7007
7008 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7009 numFaces);
7010 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7011 faceRectangles, numFaces * 4U);
7012 if (fwk_faceDetectMode ==
7013 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7014 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7015 CAM_INTF_META_FACE_LANDMARK, metadata) {
7016
7017 for (size_t i = 0; i < numFaces; i++) {
7018 // Map the landmark coordinates from the sensor output coordinate
7019 // system to the active array coordinate system.
7020 mCropRegionMapper.toActiveArray(
7021 landmarks->face_landmarks[i].left_eye_center.x,
7022 landmarks->face_landmarks[i].left_eye_center.y);
7023 mCropRegionMapper.toActiveArray(
7024 landmarks->face_landmarks[i].right_eye_center.x,
7025 landmarks->face_landmarks[i].right_eye_center.y);
7026 mCropRegionMapper.toActiveArray(
7027 landmarks->face_landmarks[i].mouth_center.x,
7028 landmarks->face_landmarks[i].mouth_center.y);
7029
7030 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
7031 k+= TOTAL_LANDMARK_INDICES;
7032 }
7033 } else {
7034 for (size_t i = 0; i < numFaces; i++) {
7035 setInvalidLandmarks(faceLandmarks+k);
7036 k+= TOTAL_LANDMARK_INDICES;
7037 }
7038 }
7039
7040 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7041 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7042 faceLandmarks, numFaces * 6U);
7043 }
7044 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7045 CAM_INTF_META_FACE_BLINK, metadata) {
7046 uint8_t detected[MAX_ROI];
7047 uint8_t degree[MAX_ROI * 2];
7048 for (size_t i = 0; i < numFaces; i++) {
7049 detected[i] = blinks->blink[i].blink_detected;
7050 degree[2 * i] = blinks->blink[i].left_blink;
7051 degree[2 * i + 1] = blinks->blink[i].right_blink;
7052 }
7053 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7054 detected, numFaces);
7055 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7056 degree, numFaces * 2);
7057 }
7058 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7059 CAM_INTF_META_FACE_SMILE, metadata) {
7060 uint8_t degree[MAX_ROI];
7061 uint8_t confidence[MAX_ROI];
7062 for (size_t i = 0; i < numFaces; i++) {
7063 degree[i] = smiles->smile[i].smile_degree;
7064 confidence[i] = smiles->smile[i].smile_confidence;
7065 }
7066 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7067 degree, numFaces);
7068 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7069 confidence, numFaces);
7070 }
7071 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7072 CAM_INTF_META_FACE_GAZE, metadata) {
7073 int8_t angle[MAX_ROI];
7074 int32_t direction[MAX_ROI * 3];
7075 int8_t degree[MAX_ROI * 2];
7076 for (size_t i = 0; i < numFaces; i++) {
7077 angle[i] = gazes->gaze[i].gaze_angle;
7078 direction[3 * i] = gazes->gaze[i].updown_dir;
7079 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7080 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7081 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7082 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7083 }
7084 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7085 (uint8_t *)angle, numFaces);
7086 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7087 direction, numFaces * 3);
7088 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7089 (uint8_t *)degree, numFaces * 2);
7090 }
7091 }
7092 }
7093 }
7094 }
7095
7096 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7097 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
7098 int32_t histogramBins = 0;
7099 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
7100 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
7101
7102 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7103 histogramBins = *histBins;
7104 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7105 }
7106
7107 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
7108 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7109 // process histogram statistics info
7110 int32_t* histogramData = NULL;
7111
7112 switch (stats_data->type) {
7113 case CAM_HISTOGRAM_TYPE_BAYER:
7114 switch (stats_data->bayer_stats.data_type) {
7115 case CAM_STATS_CHANNEL_GR:
7116 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7117 break;
7118 case CAM_STATS_CHANNEL_GB:
7119 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7120 break;
7121 case CAM_STATS_CHANNEL_B:
7122 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7123 break;
7124 case CAM_STATS_CHANNEL_Y:
7125 case CAM_STATS_CHANNEL_ALL:
7126 case CAM_STATS_CHANNEL_R:
7127 default:
7128 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7129 break;
7130 }
7131 break;
7132 case CAM_HISTOGRAM_TYPE_YUV:
7133 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
7134 break;
7135 }
7136
7137 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
7138 }
7139 }
7140 }
7141
7142 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7143 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7144 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7145 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7146 }
7147
7148 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7149 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7150 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7151 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7152 }
7153
7154 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7155 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7156 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7157 CAM_MAX_SHADING_MAP_HEIGHT);
7158 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7159 CAM_MAX_SHADING_MAP_WIDTH);
7160 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7161 lensShadingMap->lens_shading, 4U * map_width * map_height);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7165 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7166 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7167 }
7168
7169 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7170 //Populate CAM_INTF_META_TONEMAP_CURVES
7171 /* ch0 = G, ch 1 = B, ch 2 = R*/
7172 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7173 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7174 tonemap->tonemap_points_cnt,
7175 CAM_MAX_TONEMAP_CURVE_SIZE);
7176 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7177 }
7178
7179 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7180 &tonemap->curves[0].tonemap_points[0][0],
7181 tonemap->tonemap_points_cnt * 2);
7182
7183 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7184 &tonemap->curves[1].tonemap_points[0][0],
7185 tonemap->tonemap_points_cnt * 2);
7186
7187 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7188 &tonemap->curves[2].tonemap_points[0][0],
7189 tonemap->tonemap_points_cnt * 2);
7190 }
7191
7192 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7193 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7194 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7195 CC_GAIN_MAX);
7196 }
7197
7198 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7199 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7200 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7201 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7202 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7203 }
7204
7205 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7206 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7207 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7208 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7209 toneCurve->tonemap_points_cnt,
7210 CAM_MAX_TONEMAP_CURVE_SIZE);
7211 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7212 }
7213 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7214 (float*)toneCurve->curve.tonemap_points,
7215 toneCurve->tonemap_points_cnt * 2);
7216 }
7217
7218 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7219 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7220 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7221 predColorCorrectionGains->gains, 4);
7222 }
7223
7224 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7225 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7226 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7227 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7228 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7229 }
7230
7231 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7232 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7233 }
7234
7235 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7236 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7237 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7238 }
7239
7240 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7241 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7242 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7246 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7247 *effectMode);
7248 if (NAME_NOT_FOUND != val) {
7249 uint8_t fwk_effectMode = (uint8_t)val;
7250 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7251 }
7252 }
7253
7254 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7255 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7256 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7257 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7258 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7259 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7260 }
7261 int32_t fwk_testPatternData[4];
7262 fwk_testPatternData[0] = testPatternData->r;
7263 fwk_testPatternData[3] = testPatternData->b;
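// The Gr/Gb ordering of the reported test pattern values depends on the sensor's color filter arrangement.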
7264 switch (gCamCapability[mCameraId]->color_arrangement) {
7265 case CAM_FILTER_ARRANGEMENT_RGGB:
7266 case CAM_FILTER_ARRANGEMENT_GRBG:
7267 fwk_testPatternData[1] = testPatternData->gr;
7268 fwk_testPatternData[2] = testPatternData->gb;
7269 break;
7270 case CAM_FILTER_ARRANGEMENT_GBRG:
7271 case CAM_FILTER_ARRANGEMENT_BGGR:
7272 fwk_testPatternData[2] = testPatternData->gr;
7273 fwk_testPatternData[1] = testPatternData->gb;
7274 break;
7275 default:
7276 LOGE("color arrangement %d is not supported",
7277 gCamCapability[mCameraId]->color_arrangement);
7278 break;
7279 }
7280 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7281 }
7282
7283 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7284 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7285 }
7286
7287 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7288 String8 str((const char *)gps_methods);
7289 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7290 }
7291
7292 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7293 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7294 }
7295
7296 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7297 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7298 }
7299
7300 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7301 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7302 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7303 }
7304
7305 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7306 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7307 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7308 }
7309
7310 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7311 int32_t fwk_thumb_size[2];
7312 fwk_thumb_size[0] = thumb_size->width;
7313 fwk_thumb_size[1] = thumb_size->height;
7314 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7315 }
7316
7317 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7318 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7319 privateData,
7320 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7321 }
7322
7323 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
7324 camMetadata.update(QCAMERA3_EXPOSURE_METER,
7325 meteringMode, 1);
7326 }
7327
7328 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7329 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7330 LOGD("hdr_scene_data: %d %f\n",
7331 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7332 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7333 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7334 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7335 &isHdr, 1);
7336 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7337 &isHdrConfidence, 1);
7338 }
7339
7340
7341
7342 if (metadata->is_tuning_params_valid) {
7343 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7344 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7345 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7346
7347
7348 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7349 sizeof(uint32_t));
7350 data += sizeof(uint32_t);
7351
7352 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7353 sizeof(uint32_t));
7354 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7355 data += sizeof(uint32_t);
7356
7357 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7358 sizeof(uint32_t));
7359 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7360 data += sizeof(uint32_t);
7361
7362 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7363 sizeof(uint32_t));
7364 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7365 data += sizeof(uint32_t);
7366
7367 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7368 sizeof(uint32_t));
7369 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7370 data += sizeof(uint32_t);
7371
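// mod3 tuning data is not carried in this blob; its size is forced to zero before the header is serialized.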
7372 metadata->tuning_params.tuning_mod3_data_size = 0;
7373 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7374 sizeof(uint32_t));
7375 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7376 data += sizeof(uint32_t);
7377
7378 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7379 TUNING_SENSOR_DATA_MAX);
7380 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7381 count);
7382 data += count;
7383
7384 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7385 TUNING_VFE_DATA_MAX);
7386 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7387 count);
7388 data += count;
7389
7390 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7391 TUNING_CPP_DATA_MAX);
7392 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7393 count);
7394 data += count;
7395
7396 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7397 TUNING_CAC_DATA_MAX);
7398 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7399 count);
7400 data += count;
7401
7402 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7403 (int32_t *)(void *)tuning_meta_data_blob,
7404 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7405 }
7406
7407 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7408 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7409 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7410 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7411 NEUTRAL_COL_POINTS);
7412 }
7413
7414 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7415 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7416 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7417 }
7418
7419 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7420 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7421 // Adjust crop region from sensor output coordinate system to active
7422 // array coordinate system.
7423 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7424 hAeRegions->rect.width, hAeRegions->rect.height);
7425
7426 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7427 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7428 REGIONS_TUPLE_COUNT);
7429 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7430 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7431 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7432 hAeRegions->rect.height);
7433 }
7434
7435 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7436 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7437 if (NAME_NOT_FOUND != val) {
7438 uint8_t fwkAfMode = (uint8_t)val;
7439 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7440 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7441 } else {
7442 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7443 val);
7444 }
7445 }
7446
7447 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7448 uint8_t fwk_afState = (uint8_t) *afState;
7449 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7450 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7451 }
7452
7453 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7454 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7455 }
7456
7457 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7458 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7459 }
7460
7461 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7462 uint8_t fwk_lensState = *lensState;
7463 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7464 }
7465
7466 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7467 /*af regions*/
7468 int32_t afRegions[REGIONS_TUPLE_COUNT];
7469 // Adjust crop region from sensor output coordinate system to active
7470 // array coordinate system.
7471 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7472 hAfRegions->rect.width, hAfRegions->rect.height);
7473
7474 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7475 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7476 REGIONS_TUPLE_COUNT);
7477 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7478 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7479 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7480 hAfRegions->rect.height);
7481 }
7482
7483 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
7484 uint32_t ab_mode = *hal_ab_mode;
7485 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7486 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7487 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7488 }
7489 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7490 ab_mode);
7491 if (NAME_NOT_FOUND != val) {
7492 uint8_t fwk_ab_mode = (uint8_t)val;
7493 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7494 }
7495 }
7496
7497 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7498 int val = lookupFwkName(SCENE_MODES_MAP,
7499 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7500 if (NAME_NOT_FOUND != val) {
7501 uint8_t fwkBestshotMode = (uint8_t)val;
7502 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7503 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7504 } else {
7505 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7506 }
7507 }
7508
7509 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7510 uint8_t fwk_mode = (uint8_t) *mode;
7511 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7512 }
7513
7514 /* Constant metadata values to be updated */
7515 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7516 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7517
7518 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7519 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7520
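// The hot pixel map mode is always reported as OFF, so an empty hot pixel map (zero entries) is published.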
7521 int32_t hotPixelMap[2];
7522 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7523
7524 // CDS
7525 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7526 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7527 }
7528
7529 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7530 int32_t fwk_hdr;
7531 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
7532 if (*vhdr == CAM_SENSOR_HDR_OFF) {
7533 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7534 } else {
7535 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7536 }
7537
7538 if (fwk_hdr != curr_hdr_state) {
7539 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7540 if (fwk_hdr)
7541 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7542 else
7543 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7544 }
7545 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7546 }
7547
7548 // Binning correction
7549 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7550 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7551 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7552 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7553 }
7554
7555 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
7556 int32_t fwk_ir = (int32_t) *ir;
7557 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
7558 int8_t is_ir_on = 0;
7559
7560 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7561 if(is_ir_on != curr_ir_state) {
7562 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7563 if(is_ir_on)
7564 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7565 else
7566 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7567 }
7568 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
7569 }
7570
7571 // AEC SPEED
7572 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7573 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7574 }
7575
7576 // AWB SPEED
7577 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7578 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7579 }
7580
7581 // TNR
7582 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7583 uint8_t tnr_enable = tnr->denoise_enable;
7584 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7585 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
7586 int8_t is_tnr_on = 0;
7587
7588 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7589 if(is_tnr_on != curr_tnr_state) {
7590 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7591 if(is_tnr_on)
7592 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7593 else
7594 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7595 }
7596
7597 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7598 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7599 }
7600
7601 // Reprocess crop data
7602 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7603 uint8_t cnt = crop_data->num_of_streams;
7604 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7605 // mm-qcamera-daemon only posts crop_data for streams
7606 // not linked to pproc, so missing crop metadata is not
7607 // necessarily an error case.
7608 LOGD("No valid crop metadata entries");
7609 } else {
7610 uint32_t reproc_stream_id;
7611 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7612 LOGD("No reprocessible stream found, ignore crop data");
7613 } else {
7614 int rc = NO_ERROR;
7615 Vector<int32_t> roi_map;
7616 int32_t *crop = new int32_t[cnt*4];
7617 if (NULL == crop) {
7618 rc = NO_MEMORY;
7619 }
7620 if (NO_ERROR == rc) {
7621 int32_t streams_found = 0;
7622 for (size_t i = 0; i < cnt; i++) {
7623 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7624 if (pprocDone) {
7625 // HAL already does internal reprocessing,
7626 // either via reprocessing before JPEG encoding,
7627 // or offline postprocessing for pproc bypass case.
7628 crop[0] = 0;
7629 crop[1] = 0;
7630 crop[2] = mInputStreamInfo.dim.width;
7631 crop[3] = mInputStreamInfo.dim.height;
7632 } else {
7633 crop[0] = crop_data->crop_info[i].crop.left;
7634 crop[1] = crop_data->crop_info[i].crop.top;
7635 crop[2] = crop_data->crop_info[i].crop.width;
7636 crop[3] = crop_data->crop_info[i].crop.height;
7637 }
7638 roi_map.add(crop_data->crop_info[i].roi_map.left);
7639 roi_map.add(crop_data->crop_info[i].roi_map.top);
7640 roi_map.add(crop_data->crop_info[i].roi_map.width);
7641 roi_map.add(crop_data->crop_info[i].roi_map.height);
7642 streams_found++;
7643 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7644 crop[0], crop[1], crop[2], crop[3]);
7645 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7646 crop_data->crop_info[i].roi_map.left,
7647 crop_data->crop_info[i].roi_map.top,
7648 crop_data->crop_info[i].roi_map.width,
7649 crop_data->crop_info[i].roi_map.height);
7650 break;
7651
7652 }
7653 }
7654 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7655 &streams_found, 1);
7656 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7657 crop, (size_t)(streams_found * 4));
7658 if (roi_map.array()) {
7659 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7660 roi_map.array(), roi_map.size());
7661 }
7662 }
7663 if (crop) {
7664 delete [] crop;
7665 }
7666 }
7667 }
7668 }
7669
7670 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7671 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7672 // so hardcode the CAC result to OFF mode.
7673 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7674 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7675 } else {
7676 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7677 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7678 *cacMode);
7679 if (NAME_NOT_FOUND != val) {
7680 uint8_t resultCacMode = (uint8_t)val;
7681 // check whether CAC result from CB is equal to Framework set CAC mode
7682 // If not equal then set the CAC mode came in corresponding request
7683 if (fwk_cacMode != resultCacMode) {
7684 resultCacMode = fwk_cacMode;
7685 }
7686 // Check if CAC is disabled by property
7687 if (m_cacModeDisabled) {
7688 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7689 }
7690
7691 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7692 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7693 } else {
7694 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7695 }
7696 }
7697 }
7698
7699 // Post blob of cam_cds_data through vendor tag.
7700 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7701 uint8_t cnt = cdsInfo->num_of_streams;
7702 cam_cds_data_t cdsDataOverride;
7703 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7704 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7705 cdsDataOverride.num_of_streams = 1;
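// Only the CDS state of the reprocessible output stream is forwarded; the override blob always carries a single stream entry.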
7706 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7707 uint32_t reproc_stream_id;
7708 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7709 LOGD("No reprocessible stream found, ignore cds data");
7710 } else {
7711 for (size_t i = 0; i < cnt; i++) {
7712 if (cdsInfo->cds_info[i].stream_id ==
7713 reproc_stream_id) {
7714 cdsDataOverride.cds_info[0].cds_enable =
7715 cdsInfo->cds_info[i].cds_enable;
7716 break;
7717 }
7718 }
7719 }
7720 } else {
7721 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7722 }
7723 camMetadata.update(QCAMERA3_CDS_INFO,
7724 (uint8_t *)&cdsDataOverride,
7725 sizeof(cam_cds_data_t));
7726 }
7727
7728 // Ldaf calibration data
7729 if (!mLdafCalibExist) {
7730 IF_META_AVAILABLE(uint32_t, ldafCalib,
7731 CAM_INTF_META_LDAF_EXIF, metadata) {
7732 mLdafCalibExist = true;
7733 mLdafCalib[0] = ldafCalib[0];
7734 mLdafCalib[1] = ldafCalib[1];
7735 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7736 ldafCalib[0], ldafCalib[1]);
7737 }
7738 }
7739
7740 // EXIF debug data through vendor tag
7741 /*
7742 * Mobicat Mask can assume 3 values:
7743 * 1 refers to Mobicat data,
7744 * 2 refers to Stats Debug and Exif Debug Data
7745 * 3 refers to Mobicat and Stats Debug Data
7746 * We want to make sure that we are sending Exif debug data
7747 * only when Mobicat Mask is 2.
7748 */
7749 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7750 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7751 (uint8_t *)(void *)mExifParams.debug_params,
7752 sizeof(mm_jpeg_debug_exif_params_t));
7753 }
7754
7755 // Reprocess and DDM debug data through vendor tag
7756 cam_reprocess_info_t repro_info;
7757 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
7758 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7759 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
7760 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
7761 }
7762 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7763 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
7764 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
7765 }
7766 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7767 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
7768 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
7769 }
7770 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7771 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
7772 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
7773 }
7774 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7775 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
7776 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
7777 }
7778 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
7779 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
7780 }
7781 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7782 CAM_INTF_PARM_ROTATION, metadata) {
7783 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
7784 }
7785 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7786 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7787 }
7788 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7789 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7790 }
7791 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7792 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
7793
7794 // INSTANT AEC MODE
7795 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7796 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7797 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7798 }
7799
7800 // AF scene change
7801 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7802 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7803 }
7804
7805 resultMetadata = camMetadata.release();
7806 return resultMetadata;
7807}
7808
7809/*===========================================================================
7810 * FUNCTION : saveExifParams
7811 *
7812 * DESCRIPTION: Save the EXIF debug parameters carried in the HAL metadata into mExifParams.
7813 *
7814 * PARAMETERS :
7815 * @metadata : metadata information from callback
7816 *
7817 * RETURN : none
7818 *
7819 *==========================================================================*/
7820void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7821{
7822 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7823 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7824 if (mExifParams.debug_params) {
7825 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7826 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7827 }
7828 }
7829 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7830 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7831 if (mExifParams.debug_params) {
7832 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7833 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7834 }
7835 }
7836 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7837 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7838 if (mExifParams.debug_params) {
7839 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7840 mExifParams.debug_params->af_debug_params_valid = TRUE;
7841 }
7842 }
7843 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7844 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7845 if (mExifParams.debug_params) {
7846 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7847 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7848 }
7849 }
7850 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7851 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7852 if (mExifParams.debug_params) {
7853 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7854 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7855 }
7856 }
7857 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7858 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7859 if (mExifParams.debug_params) {
7860 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7861 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7862 }
7863 }
7864 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7865 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7866 if (mExifParams.debug_params) {
7867 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7868 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7869 }
7870 }
7871 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7872 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7873 if (mExifParams.debug_params) {
7874 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7875 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7876 }
7877 }
7878}
7879
7880/*===========================================================================
7881 * FUNCTION : get3AExifParams
7882 *
7883 * DESCRIPTION: Return the cached EXIF parameters, including any saved 3A debug data.
7884 *
7885 * PARAMETERS : none
7886 *
7887 *
7888 * RETURN : mm_jpeg_exif_params_t
7889 *
7890 *==========================================================================*/
7891mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7892{
7893 return mExifParams;
7894}
7895
7896/*===========================================================================
7897 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7898 *
7899 * DESCRIPTION: Translate urgent (partial-result) 3A metadata from the HAL into framework result metadata.
7900 *
7901 * PARAMETERS :
7902 * @metadata : metadata information from callback
7903 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7904 * urgent metadata in a batch. Always true for
7905 * non-batch mode.
7906 *
7907 * RETURN : camera_metadata_t*
7908 * metadata in a format specified by fwk
7909 *==========================================================================*/
7910camera_metadata_t*
7911QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7912 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
7913{
7914 CameraMetadata camMetadata;
7915 camera_metadata_t *resultMetadata;
7916
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007917 if (!lastUrgentMetadataInBatch) {
7918 /* In batch mode, use empty metadata if this is not the last in batch
7919 */
7920 resultMetadata = allocate_camera_metadata(0, 0);
7921 return resultMetadata;
7922 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007923
7924 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7925 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7926 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7927 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7928 }
7929
7930 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7931 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7932 &aecTrigger->trigger, 1);
7933 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7934 &aecTrigger->trigger_id, 1);
7935 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7936 aecTrigger->trigger);
7937 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7938 aecTrigger->trigger_id);
7939 }
7940
7941 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7942 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7943 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7944 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7945 }
7946
Thierry Strudel3d639192016-09-09 11:52:26 -07007947 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7948 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7949 &af_trigger->trigger, 1);
7950 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7951 af_trigger->trigger);
7952 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7953 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7954 af_trigger->trigger_id);
7955 }
7956
7957 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7958 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7959 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7960 if (NAME_NOT_FOUND != val) {
7961 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7962 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7963 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7964 } else {
7965 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7966 }
7967 }
7968
7969 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7970 uint32_t aeMode = CAM_AE_MODE_MAX;
7971 int32_t flashMode = CAM_FLASH_MODE_MAX;
7972 int32_t redeye = -1;
7973 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7974 aeMode = *pAeMode;
7975 }
7976 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7977 flashMode = *pFlashMode;
7978 }
7979 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7980 redeye = *pRedeye;
7981 }
7982
7983 if (1 == redeye) {
7984 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7985 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7986 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7987 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7988 flashMode);
7989 if (NAME_NOT_FOUND != val) {
7990 fwk_aeMode = (uint8_t)val;
7991 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7992 } else {
7993 LOGE("Unsupported flash mode %d", flashMode);
7994 }
7995 } else if (aeMode == CAM_AE_MODE_ON) {
7996 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7997 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7998 } else if (aeMode == CAM_AE_MODE_OFF) {
7999 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8000 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8001 } else {
8002 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8003 "flashMode:%d, aeMode:%u!!!",
8004 redeye, flashMode, aeMode);
8005 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008006 if (mInstantAEC) {
8007        // Increment frame index count until a bound is reached for instant AEC.
8008 mInstantAecFrameIdxCount++;
8009 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8010 CAM_INTF_META_AEC_INFO, metadata) {
8011 LOGH("ae_params->settled = %d",ae_params->settled);
8012            // If AEC has settled, or the number of frames has reached the
8013            // bound value, reset instant AEC.
8014 if (ae_params->settled ||
8015 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8016 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8017 mInstantAEC = false;
8018 mResetInstantAEC = true;
8019 mInstantAecFrameIdxCount = 0;
8020 }
8021 }
8022 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008023 resultMetadata = camMetadata.release();
8024 return resultMetadata;
8025}
8026
8027/*===========================================================================
8028 * FUNCTION : dumpMetadataToFile
8029 *
8030 * DESCRIPTION: Dumps tuning metadata to file system
8031 *
8032 * PARAMETERS :
8033 * @meta : tuning metadata
8034 * @dumpFrameCount : current dump frame count
8035 * @enabled : whether metadata dumping is enabled
 * @type : dump type string used in the output file name
 * @frameNumber : frame number used in the output file name
8036 *
8037 *==========================================================================*/
8038void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8039 uint32_t &dumpFrameCount,
8040 bool enabled,
8041 const char *type,
8042 uint32_t frameNumber)
8043{
8044 //Some sanity checks
8045 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8046 LOGE("Tuning sensor data size bigger than expected %d: %d",
8047 meta.tuning_sensor_data_size,
8048 TUNING_SENSOR_DATA_MAX);
8049 return;
8050 }
8051
8052 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8053 LOGE("Tuning VFE data size bigger than expected %d: %d",
8054 meta.tuning_vfe_data_size,
8055 TUNING_VFE_DATA_MAX);
8056 return;
8057 }
8058
8059 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8060 LOGE("Tuning CPP data size bigger than expected %d: %d",
8061 meta.tuning_cpp_data_size,
8062 TUNING_CPP_DATA_MAX);
8063 return;
8064 }
8065
8066 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8067 LOGE("Tuning CAC data size bigger than expected %d: %d",
8068 meta.tuning_cac_data_size,
8069 TUNING_CAC_DATA_MAX);
8070 return;
8071 }
8072 //
8073
8074 if(enabled){
8075 char timeBuf[FILENAME_MAX];
8076 char buf[FILENAME_MAX];
8077 memset(buf, 0, sizeof(buf));
8078 memset(timeBuf, 0, sizeof(timeBuf));
8079 time_t current_time;
8080 struct tm * timeinfo;
8081 time (&current_time);
8082 timeinfo = localtime (&current_time);
8083 if (timeinfo != NULL) {
8084 strftime (timeBuf, sizeof(timeBuf),
8085 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8086 }
8087 String8 filePath(timeBuf);
8088 snprintf(buf,
8089 sizeof(buf),
8090 "%dm_%s_%d.bin",
8091 dumpFrameCount,
8092 type,
8093 frameNumber);
8094 filePath.append(buf);
8095 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8096 if (file_fd >= 0) {
8097 ssize_t written_len = 0;
8098 meta.tuning_data_version = TUNING_DATA_VERSION;
8099 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8100 written_len += write(file_fd, data, sizeof(uint32_t));
8101 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8102 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8103 written_len += write(file_fd, data, sizeof(uint32_t));
8104 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8105 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8106 written_len += write(file_fd, data, sizeof(uint32_t));
8107 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8108 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8109 written_len += write(file_fd, data, sizeof(uint32_t));
8110 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8111 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8112 written_len += write(file_fd, data, sizeof(uint32_t));
8113 meta.tuning_mod3_data_size = 0;
8114 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8115 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8116 written_len += write(file_fd, data, sizeof(uint32_t));
8117 size_t total_size = meta.tuning_sensor_data_size;
8118 data = (void *)((uint8_t *)&meta.data);
8119 written_len += write(file_fd, data, total_size);
8120 total_size = meta.tuning_vfe_data_size;
8121 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8122 written_len += write(file_fd, data, total_size);
8123 total_size = meta.tuning_cpp_data_size;
8124 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8125 written_len += write(file_fd, data, total_size);
8126 total_size = meta.tuning_cac_data_size;
8127 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8128 written_len += write(file_fd, data, total_size);
8129 close(file_fd);
8130        } else {
8131            LOGE("failed to open file for metadata dumping");
8132 }
8133 }
8134}
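/* Illustrative sketch (an assumption, not compiled into the HAL): an offline
 * parser for the dump written above. The on-disk layout is six uint32_t words
 * (data version, then sensor, VFE, CPP, CAC and mod3 sizes) followed by the
 * sensor, VFE, CPP and CAC payloads in that order; names below are hypothetical.
 *
 *   #include <cstdint>
 *   #include <cstdio>
 *   #include <vector>
 *
 *   struct TuningDump {
 *       uint32_t version;
 *       std::vector<uint8_t> sensor, vfe, cpp, cac;
 *   };
 *
 *   static bool parseTuningDump(const char *path, TuningDump &out) {
 *       FILE *fp = fopen(path, "rb");
 *       if (fp == nullptr) return false;
 *       uint32_t hdr[6] = {0};   // version word + five size words
 *       bool ok = (fread(hdr, sizeof(uint32_t), 6, fp) == 6);
 *       if (ok) {
 *           out.version = hdr[0];
 *           std::vector<uint8_t> *blobs[] = {&out.sensor, &out.vfe, &out.cpp, &out.cac};
 *           for (int i = 0; ok && i < 4; i++) {   // hdr[5] (mod3) is written as 0
 *               blobs[i]->resize(hdr[i + 1]);
 *               ok = (hdr[i + 1] == 0) ||
 *                       (fread(blobs[i]->data(), 1, hdr[i + 1], fp) == hdr[i + 1]);
 *           }
 *       }
 *       fclose(fp);
 *       return ok;
 *   }
 */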
8135
8136/*===========================================================================
8137 * FUNCTION : cleanAndSortStreamInfo
8138 *
8139 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8140 *              and sort them such that the raw stream is at the end of the list.
8141 *              This is a workaround for a camera daemon constraint.
8142 *
8143 * PARAMETERS : None
8144 *
8145 *==========================================================================*/
8146void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8147{
8148 List<stream_info_t *> newStreamInfo;
8149
8150 /*clean up invalid streams*/
8151 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8152 it != mStreamInfo.end();) {
8153 if(((*it)->status) == INVALID){
8154 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8155 delete channel;
8156 free(*it);
8157 it = mStreamInfo.erase(it);
8158 } else {
8159 it++;
8160 }
8161 }
8162
8163 // Move preview/video/callback/snapshot streams into newList
8164 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8165 it != mStreamInfo.end();) {
8166 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8167 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8168 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8169 newStreamInfo.push_back(*it);
8170 it = mStreamInfo.erase(it);
8171 } else
8172 it++;
8173 }
8174 // Move raw streams into newList
8175 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8176 it != mStreamInfo.end();) {
8177 newStreamInfo.push_back(*it);
8178 it = mStreamInfo.erase(it);
8179 }
8180
8181 mStreamInfo = newStreamInfo;
8182}
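/* Illustrative example (assumed stream set, not part of the logic above): given
 * mStreamInfo = {RAW10, PREVIEW, SNAPSHOT} where the SNAPSHOT entry is marked
 * INVALID, the first pass deletes the SNAPSHOT entry and the two move passes
 * reorder the rest to {PREVIEW, RAW10}, i.e. invalid streams are dropped and
 * raw streams end up at the tail of the list.
 */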
8183
8184/*===========================================================================
8185 * FUNCTION : extractJpegMetadata
8186 *
8187 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8188 *              JPEG metadata is cached in the HAL and returned as part of the
8189 *              capture result when metadata is received from the camera daemon.
8190 *
8191 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8192 * @request: capture request
8193 *
8194 *==========================================================================*/
8195void QCamera3HardwareInterface::extractJpegMetadata(
8196 CameraMetadata& jpegMetadata,
8197 const camera3_capture_request_t *request)
8198{
8199 CameraMetadata frame_settings;
8200 frame_settings = request->settings;
8201
8202 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8203 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8204 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8205 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8206
8207 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8208 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8209 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8210 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8211
8212 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8213 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8214 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8215 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8216
8217 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8218 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8219 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8220 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8221
8222 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8223 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8224 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8225 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8226
8227 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8228 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8229 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8230 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8231
8232 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8233 int32_t thumbnail_size[2];
8234 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8235 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8236 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8237 int32_t orientation =
8238 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008239 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008240 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8241 int32_t temp;
8242 temp = thumbnail_size[0];
8243 thumbnail_size[0] = thumbnail_size[1];
8244 thumbnail_size[1] = temp;
8245 }
8246 }
8247 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8248 thumbnail_size,
8249 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8250 }
8251
8252}
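/* Illustrative example (assumed request values): if the request carries
 * ANDROID_JPEG_THUMBNAIL_SIZE = 320x240 and ANDROID_JPEG_ORIENTATION = 90, and
 * needJpegExifRotation() is false, the cached thumbnail size becomes 240x320 so
 * that it matches the rotated main image; with orientation 0 or 180 the size is
 * cached unchanged.
 */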
8253
8254/*===========================================================================
8255 * FUNCTION : convertToRegions
8256 *
8257 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8258 *
8259 * PARAMETERS :
8260 * @rect : cam_rect_t struct to convert
8261 * @region : int32_t destination array
8262 * @weight : if we are converting from cam_area_t, weight is valid
8263 * else weight = -1
8264 *
8265 *==========================================================================*/
8266void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8267 int32_t *region, int weight)
8268{
8269 region[0] = rect.left;
8270 region[1] = rect.top;
8271 region[2] = rect.left + rect.width;
8272 region[3] = rect.top + rect.height;
8273 if (weight > -1) {
8274 region[4] = weight;
8275 }
8276}
8277
8278/*===========================================================================
8279 * FUNCTION : convertFromRegions
8280 *
8281 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
8282 *
8283 * PARAMETERS :
8284 *   @roi : cam_area_t destination to populate
8285 *   @frame_settings : framework capture settings containing the region tag
8286 *   @tag : metadata tag holding [x_min, y_min, x_max, y_max, weight]
8288 *
8289 *==========================================================================*/
8290void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008291 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008292{
Thierry Strudel3d639192016-09-09 11:52:26 -07008293 int32_t x_min = frame_settings.find(tag).data.i32[0];
8294 int32_t y_min = frame_settings.find(tag).data.i32[1];
8295 int32_t x_max = frame_settings.find(tag).data.i32[2];
8296 int32_t y_max = frame_settings.find(tag).data.i32[3];
8297 roi.weight = frame_settings.find(tag).data.i32[4];
8298 roi.rect.left = x_min;
8299 roi.rect.top = y_min;
8300 roi.rect.width = x_max - x_min;
8301 roi.rect.height = y_max - y_min;
8302}
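/* Illustrative sketch (assumed values; cam_rect_t fields taken as left, top,
 * width, height): the framework region arrays handled by convertToRegions()
 * and convertFromRegions() are laid out as [x_min, y_min, x_max, y_max, weight].
 *
 *   int32_t region[5];
 *   cam_rect_t rect = {100, 200, 300, 400};
 *   convertToRegions(rect, region, 1);   // region = {100, 200, 400, 600, 1}
 *
 * Converting such an array back with convertFromRegions() restores the same
 * left/top/width/height and weight.
 */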
8303
8304/*===========================================================================
8305 * FUNCTION : resetIfNeededROI
8306 *
8307 * DESCRIPTION: helper method to clamp the ROI to the scaler crop region, or
8308 *              return false if the ROI lies completely outside that region
8309 *
8310 * PARAMETERS :
8311 * @roi : cam_area_t struct to resize
8312 * @scalerCropRegion : cam_crop_region_t region to compare against
8313 *
8314 *
8315 *==========================================================================*/
8316bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8317 const cam_crop_region_t* scalerCropRegion)
8318{
8319 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8320 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8321 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8322 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8323
8324    /* According to the spec, weight = 0 indicates that the ROI must be disabled.
8325     * Without this check, the validation below (whether the ROI lies inside the
8326     * scaler crop region) would fail, the ROI would not be reset, and the
8327     * algorithm would continue to use a stale ROI window.
8328     */
8329 if (roi->weight == 0) {
8330 return true;
8331 }
8332
8333    if ((roi_x_max < scalerCropRegion->left) ||
8334        // right edge of roi window is left of scaler crop's left edge
8335        (roi_y_max < scalerCropRegion->top)  ||
8336        // bottom edge of roi window is above scaler crop's top edge
8337        (roi->rect.left > crop_x_max) ||
8338        // left edge of roi window is beyond (right of) scaler crop's right edge
8339        (roi->rect.top > crop_y_max)){
8340        // top edge of roi window is below scaler crop's bottom edge
8341 return false;
8342 }
8343 if (roi->rect.left < scalerCropRegion->left) {
8344 roi->rect.left = scalerCropRegion->left;
8345 }
8346 if (roi->rect.top < scalerCropRegion->top) {
8347 roi->rect.top = scalerCropRegion->top;
8348 }
8349 if (roi_x_max > crop_x_max) {
8350 roi_x_max = crop_x_max;
8351 }
8352 if (roi_y_max > crop_y_max) {
8353 roi_y_max = crop_y_max;
8354 }
8355 roi->rect.width = roi_x_max - roi->rect.left;
8356 roi->rect.height = roi_y_max - roi->rect.top;
8357 return true;
8358}
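/* Illustrative example (assumed values): with a scaler crop region of
 * {left=0, top=0, width=1920, height=1080} and an ROI of
 * {left=1800, top=900, width=400, height=400, weight=1}, the clamping above
 * yields {left=1800, top=900, width=120, height=180} and returns true. An ROI
 * lying completely outside the crop region returns false, and an ROI with
 * weight == 0 returns true without modification.
 */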
8359
8360/*===========================================================================
8361 * FUNCTION : convertLandmarks
8362 *
8363 * DESCRIPTION: helper method to extract the landmarks from face detection info
8364 *
8365 * PARAMETERS :
8366 * @landmark_data : input landmark data to be converted
8367 * @landmarks : int32_t destination array
8368 *
8369 *
8370 *==========================================================================*/
8371void QCamera3HardwareInterface::convertLandmarks(
8372 cam_face_landmarks_info_t landmark_data,
8373 int32_t *landmarks)
8374{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008375 if (landmark_data.is_left_eye_valid) {
8376 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8377 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8378 } else {
8379 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8380 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8381 }
8382
8383 if (landmark_data.is_right_eye_valid) {
8384 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8385 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8386 } else {
8387 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8388 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8389 }
8390
8391 if (landmark_data.is_mouth_valid) {
8392 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8393 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8394 } else {
8395 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8396 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8397 }
8398}
8399
8400/*===========================================================================
8401 * FUNCTION : setInvalidLandmarks
8402 *
8403 * DESCRIPTION: helper method to set invalid landmarks
8404 *
8405 * PARAMETERS :
8406 * @landmarks : int32_t destination array
8407 *
8408 *
8409 *==========================================================================*/
8410void QCamera3HardwareInterface::setInvalidLandmarks(
8411 int32_t *landmarks)
8412{
8413 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8414 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8415 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8416 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8417 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8418 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008419}
8420
8421#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008422
8423/*===========================================================================
8424 * FUNCTION : getCapabilities
8425 *
8426 * DESCRIPTION: query camera capability from back-end
8427 *
8428 * PARAMETERS :
8429 * @ops : mm-interface ops structure
8430 * @cam_handle : camera handle for which we need capability
8431 *
8432 * RETURN : ptr type of capability structure
8433 * capability for success
8434 * NULL for failure
8435 *==========================================================================*/
8436cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8437 uint32_t cam_handle)
8438{
8439 int rc = NO_ERROR;
8440 QCamera3HeapMemory *capabilityHeap = NULL;
8441 cam_capability_t *cap_ptr = NULL;
8442
8443 if (ops == NULL) {
8444 LOGE("Invalid arguments");
8445 return NULL;
8446 }
8447
8448 capabilityHeap = new QCamera3HeapMemory(1);
8449 if (capabilityHeap == NULL) {
8450 LOGE("creation of capabilityHeap failed");
8451 return NULL;
8452 }
8453
8454 /* Allocate memory for capability buffer */
8455 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8456 if(rc != OK) {
8457 LOGE("No memory for cappability");
8458 goto allocate_failed;
8459 }
8460
8461 /* Map memory for capability buffer */
8462 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8463
8464 rc = ops->map_buf(cam_handle,
8465 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8466 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8467 if(rc < 0) {
8468 LOGE("failed to map capability buffer");
8469 rc = FAILED_TRANSACTION;
8470 goto map_failed;
8471 }
8472
8473 /* Query Capability */
8474 rc = ops->query_capability(cam_handle);
8475 if(rc < 0) {
8476 LOGE("failed to query capability");
8477 rc = FAILED_TRANSACTION;
8478 goto query_failed;
8479 }
8480
8481 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8482 if (cap_ptr == NULL) {
8483 LOGE("out of memory");
8484 rc = NO_MEMORY;
8485 goto query_failed;
8486 }
8487
8488 memset(cap_ptr, 0, sizeof(cam_capability_t));
8489 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8490
8491 int index;
8492 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8493 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8494 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8495 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8496 }
8497
8498query_failed:
8499 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8500map_failed:
8501 capabilityHeap->deallocate();
8502allocate_failed:
8503 delete capabilityHeap;
8504
8505 if (rc != NO_ERROR) {
8506 return NULL;
8507 } else {
8508 return cap_ptr;
8509 }
8510}
8511
Thierry Strudel3d639192016-09-09 11:52:26 -07008512/*===========================================================================
8513 * FUNCTION : initCapabilities
8514 *
8515 * DESCRIPTION: initialize camera capabilities in static data struct
8516 *
8517 * PARAMETERS :
8518 * @cameraId : camera Id
8519 *
8520 * RETURN : int32_t type of status
8521 * NO_ERROR -- success
8522 * none-zero failure code
8523 *==========================================================================*/
8524int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8525{
8526 int rc = 0;
8527 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008528 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008529
8530 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8531 if (rc) {
8532 LOGE("camera_open failed. rc = %d", rc);
8533 goto open_failed;
8534 }
8535 if (!cameraHandle) {
8536 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8537 goto open_failed;
8538 }
8539
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008540 handle = get_main_camera_handle(cameraHandle->camera_handle);
8541 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8542 if (gCamCapability[cameraId] == NULL) {
8543 rc = FAILED_TRANSACTION;
8544 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008545 }
8546
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008547 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008548 if (is_dual_camera_by_idx(cameraId)) {
8549 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8550 gCamCapability[cameraId]->aux_cam_cap =
8551 getCapabilities(cameraHandle->ops, handle);
8552 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8553 rc = FAILED_TRANSACTION;
8554 free(gCamCapability[cameraId]);
8555 goto failed_op;
8556 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008557
8558 // Copy the main camera capability to main_cam_cap struct
8559 gCamCapability[cameraId]->main_cam_cap =
8560 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8561 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8562 LOGE("out of memory");
8563 rc = NO_MEMORY;
8564 goto failed_op;
8565 }
8566 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8567 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008568 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008569failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008570 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8571 cameraHandle = NULL;
8572open_failed:
8573 return rc;
8574}
8575
8576/*==========================================================================
8577 * FUNCTION   : get3AVersion
8578 *
8579 * DESCRIPTION: get the Q3A S/W version
8580 *
8581 * PARAMETERS :
8582 * @sw_version: Reference of Q3A structure which will hold version info upon
8583 * return
8584 *
8585 * RETURN : None
8586 *
8587 *==========================================================================*/
8588void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8589{
8590 if(gCamCapability[mCameraId])
8591 sw_version = gCamCapability[mCameraId]->q3a_version;
8592 else
8593 LOGE("Capability structure NULL!");
8594}
8595
8596
8597/*===========================================================================
8598 * FUNCTION : initParameters
8599 *
8600 * DESCRIPTION: initialize camera parameters
8601 *
8602 * PARAMETERS :
8603 *
8604 * RETURN : int32_t type of status
8605 * NO_ERROR -- success
8606 *              non-zero failure code
8607 *==========================================================================*/
8608int QCamera3HardwareInterface::initParameters()
8609{
8610 int rc = 0;
8611
8612 //Allocate Set Param Buffer
8613 mParamHeap = new QCamera3HeapMemory(1);
8614 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8615 if(rc != OK) {
8616 rc = NO_MEMORY;
8617 LOGE("Failed to allocate SETPARM Heap memory");
8618 delete mParamHeap;
8619 mParamHeap = NULL;
8620 return rc;
8621 }
8622
8623 //Map memory for parameters buffer
8624 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8625 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8626 mParamHeap->getFd(0),
8627 sizeof(metadata_buffer_t),
8628 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8629 if(rc < 0) {
8630 LOGE("failed to map SETPARM buffer");
8631 rc = FAILED_TRANSACTION;
8632 mParamHeap->deallocate();
8633 delete mParamHeap;
8634 mParamHeap = NULL;
8635 return rc;
8636 }
8637
8638 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8639
8640 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8641 return rc;
8642}
8643
8644/*===========================================================================
8645 * FUNCTION : deinitParameters
8646 *
8647 * DESCRIPTION: de-initialize camera parameters
8648 *
8649 * PARAMETERS :
8650 *
8651 * RETURN : NONE
8652 *==========================================================================*/
8653void QCamera3HardwareInterface::deinitParameters()
8654{
8655 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8656 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8657
8658 mParamHeap->deallocate();
8659 delete mParamHeap;
8660 mParamHeap = NULL;
8661
8662 mParameters = NULL;
8663
8664 free(mPrevParameters);
8665 mPrevParameters = NULL;
8666}
8667
8668/*===========================================================================
8669 * FUNCTION : calcMaxJpegSize
8670 *
8671 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8672 *
8673 * PARAMETERS :
8674 *   @camera_id : camera Id
 *
8675 * RETURN : max_jpeg_size
8676 *==========================================================================*/
8677size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8678{
8679 size_t max_jpeg_size = 0;
8680 size_t temp_width, temp_height;
8681 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8682 MAX_SIZES_CNT);
8683 for (size_t i = 0; i < count; i++) {
8684 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8685 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8686 if (temp_width * temp_height > max_jpeg_size ) {
8687 max_jpeg_size = temp_width * temp_height;
8688 }
8689 }
8690 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8691 return max_jpeg_size;
8692}
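/* Illustrative arithmetic (assumed 4000x3000 maximum picture size): the bound
 * above evaluates to 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 * = 18,000,000 bytes plus the blob header, i.e. the worst-case YUV-sized
 * payload followed by the trailing camera3_jpeg_blob_t marker.
 */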
8693
8694/*===========================================================================
8695 * FUNCTION : getMaxRawSize
8696 *
8697 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8698 *
8699 * PARAMETERS :
8700 *   @camera_id : camera Id
 *
8701 * RETURN : Largest supported Raw Dimension
8702 *==========================================================================*/
8703cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8704{
8705 int max_width = 0;
8706 cam_dimension_t maxRawSize;
8707
8708 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8709 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8710 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8711 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8712 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8713 }
8714 }
8715 return maxRawSize;
8716}
8717
8718
8719/*===========================================================================
8720 * FUNCTION : calcMaxJpegDim
8721 *
8722 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8723 *
8724 * PARAMETERS :
8725 *
8726 * RETURN : max_jpeg_dim
8727 *==========================================================================*/
8728cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8729{
8730 cam_dimension_t max_jpeg_dim;
8731 cam_dimension_t curr_jpeg_dim;
8732 max_jpeg_dim.width = 0;
8733 max_jpeg_dim.height = 0;
8734 curr_jpeg_dim.width = 0;
8735 curr_jpeg_dim.height = 0;
8736 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8737 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8738 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8739 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8740 max_jpeg_dim.width * max_jpeg_dim.height ) {
8741 max_jpeg_dim.width = curr_jpeg_dim.width;
8742 max_jpeg_dim.height = curr_jpeg_dim.height;
8743 }
8744 }
8745 return max_jpeg_dim;
8746}
8747
8748/*===========================================================================
8749 * FUNCTION : addStreamConfig
8750 *
8751 * DESCRIPTION: adds the stream configuration to the array
8752 *
8753 * PARAMETERS :
8754 * @available_stream_configs : pointer to stream configuration array
8755 * @scalar_format : scalar format
8756 * @dim : configuration dimension
8757 * @config_type : input or output configuration type
8758 *
8759 * RETURN : NONE
8760 *==========================================================================*/
8761void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8762 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8763{
8764 available_stream_configs.add(scalar_format);
8765 available_stream_configs.add(dim.width);
8766 available_stream_configs.add(dim.height);
8767 available_stream_configs.add(config_type);
8768}
8769
8770/*===========================================================================
8771 * FUNCTION   : supportBurstCapture
8772 *
8773 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8774 *
8775 * PARAMETERS :
8776 * @cameraId : camera Id
8777 *
8778 * RETURN : true if camera supports BURST_CAPTURE
8779 * false otherwise
8780 *==========================================================================*/
8781bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8782{
8783 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8784 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8785 const int32_t highResWidth = 3264;
8786 const int32_t highResHeight = 2448;
8787
8788 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8789 // Maximum resolution images cannot be captured at >= 10fps
8790 // -> not supporting BURST_CAPTURE
8791 return false;
8792 }
8793
8794 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8795 // Maximum resolution images can be captured at >= 20fps
8796 // --> supporting BURST_CAPTURE
8797 return true;
8798 }
8799
8800 // Find the smallest highRes resolution, or largest resolution if there is none
8801 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8802 MAX_SIZES_CNT);
8803 size_t highRes = 0;
8804 while ((highRes + 1 < totalCnt) &&
8805 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8806 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8807 highResWidth * highResHeight)) {
8808 highRes++;
8809 }
8810 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8811 return true;
8812 } else {
8813 return false;
8814 }
8815}
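/* Illustrative example (assumed timings): if picture_min_duration[0] is 80 ms,
 * neither early return above fires (80 ms lies between the 50 ms and 100 ms
 * bounds), so the loop finds the smallest size still >= 3264x2448 and
 * BURST_CAPTURE is advertised only if that size sustains <= 50 ms per frame
 * (>= 20 fps).
 */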
8816
8817/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008818 * FUNCTION : getPDStatIndex
8819 *
8820 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8821 *
8822 * PARAMETERS :
8823 * @caps : camera capabilities
8824 *
8825 * RETURN : int32_t type
8826 * non-negative - on success
8827 * -1 - on failure
8828 *==========================================================================*/
8829int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8830 if (nullptr == caps) {
8831 return -1;
8832 }
8833
8834 uint32_t metaRawCount = caps->meta_raw_channel_count;
8835 int32_t ret = -1;
8836 for (size_t i = 0; i < metaRawCount; i++) {
8837 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8838 ret = i;
8839 break;
8840 }
8841 }
8842
8843 return ret;
8844}
8845
8846/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008847 * FUNCTION : initStaticMetadata
8848 *
8849 * DESCRIPTION: initialize the static metadata
8850 *
8851 * PARAMETERS :
8852 * @cameraId : camera Id
8853 *
8854 * RETURN : int32_t type of status
8855 * 0 -- success
8856 * non-zero failure code
8857 *==========================================================================*/
8858int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8859{
8860 int rc = 0;
8861 CameraMetadata staticInfo;
8862 size_t count = 0;
8863 bool limitedDevice = false;
8864 char prop[PROPERTY_VALUE_MAX];
8865 bool supportBurst = false;
8866
8867 supportBurst = supportBurstCapture(cameraId);
8868
8869 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8870     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8871     * advertised as a limited device */
8872 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8873 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8874 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8875 !supportBurst;
8876
8877 uint8_t supportedHwLvl = limitedDevice ?
8878 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008879#ifndef USE_HAL_3_3
8880 // LEVEL_3 - This device will support level 3.
8881 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8882#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008883 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008884#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008885
8886 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8887 &supportedHwLvl, 1);
8888
8889 bool facingBack = false;
8890 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8891 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8892 facingBack = true;
8893 }
8894 /*HAL 3 only*/
8895 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8896 &gCamCapability[cameraId]->min_focus_distance, 1);
8897
8898 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8899 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8900
8901 /*should be using focal lengths but sensor doesn't provide that info now*/
8902 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8903 &gCamCapability[cameraId]->focal_length,
8904 1);
8905
8906 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8907 gCamCapability[cameraId]->apertures,
8908 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8909
8910 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8911 gCamCapability[cameraId]->filter_densities,
8912 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8913
8914
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008915 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8916 size_t mode_count =
8917 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8918 for (size_t i = 0; i < mode_count; i++) {
8919 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008921 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008922 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008923
8924 int32_t lens_shading_map_size[] = {
8925 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8926 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8927 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8928 lens_shading_map_size,
8929 sizeof(lens_shading_map_size)/sizeof(int32_t));
8930
8931 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8932 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8933
8934 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8935 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8936
8937 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8938 &gCamCapability[cameraId]->max_frame_duration, 1);
8939
8940 camera_metadata_rational baseGainFactor = {
8941 gCamCapability[cameraId]->base_gain_factor.numerator,
8942 gCamCapability[cameraId]->base_gain_factor.denominator};
8943 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8944 &baseGainFactor, 1);
8945
8946 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8947 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8948
8949 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8950 gCamCapability[cameraId]->pixel_array_size.height};
8951 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8952 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8953
8954 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8955 gCamCapability[cameraId]->active_array_size.top,
8956 gCamCapability[cameraId]->active_array_size.width,
8957 gCamCapability[cameraId]->active_array_size.height};
8958 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8959 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8960
8961 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8962 &gCamCapability[cameraId]->white_level, 1);
8963
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008964 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8965 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8966 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008967 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008968 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008969
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008970#ifndef USE_HAL_3_3
8971 bool hasBlackRegions = false;
8972 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8973 LOGW("black_region_count: %d is bounded to %d",
8974 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8975 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8976 }
8977 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8978 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8979 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8980 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8981 }
8982 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8983 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8984 hasBlackRegions = true;
8985 }
8986#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008987 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8988 &gCamCapability[cameraId]->flash_charge_duration, 1);
8989
8990 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8991 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8992
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008993 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8994 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8995 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008996 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8997 &timestampSource, 1);
8998
Thierry Strudel54dc9782017-02-15 12:12:10 -08008999 //update histogram vendor data
9000 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009001 &gCamCapability[cameraId]->histogram_size, 1);
9002
Thierry Strudel54dc9782017-02-15 12:12:10 -08009003 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009004 &gCamCapability[cameraId]->max_histogram_count, 1);
9005
Shuzhen Wang14415f52016-11-16 18:26:18 -08009006 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9007    //so that the app can request fewer bins than the maximum supported.
9008 std::vector<int32_t> histBins;
9009 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9010 histBins.push_back(maxHistBins);
9011 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9012 (maxHistBins & 0x1) == 0) {
9013 histBins.push_back(maxHistBins >> 1);
9014 maxHistBins >>= 1;
9015 }
9016 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9017 histBins.data(), histBins.size());
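    // Illustrative example (assumed max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 32): the loop above advertises
    // {256, 128, 64, 32}, letting the app pick any of these bin counts.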
9018
Thierry Strudel3d639192016-09-09 11:52:26 -07009019 int32_t sharpness_map_size[] = {
9020 gCamCapability[cameraId]->sharpness_map_size.width,
9021 gCamCapability[cameraId]->sharpness_map_size.height};
9022
9023 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9024 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9025
9026 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9027 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9028
Emilian Peev0f3c3162017-03-15 12:57:46 +00009029 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9030 if (0 <= indexPD) {
9031 // Advertise PD stats data as part of the Depth capabilities
9032 int32_t depthWidth =
9033 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9034 int32_t depthHeight =
9035 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9036 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9037 assert(0 < depthSamplesCount);
9038 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9039 &depthSamplesCount, 1);
9040
9041 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9042 depthHeight,
9043 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9044 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9045 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9046 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9047 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9048
9049 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9050 depthHeight, 33333333,
9051 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9052 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9053 depthMinDuration,
9054 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9055
9056 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9057 depthHeight, 0,
9058 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9059 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9060 depthStallDuration,
9061 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9062
9063 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9064 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9065 }
9066
Thierry Strudel3d639192016-09-09 11:52:26 -07009067 int32_t scalar_formats[] = {
9068 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9069 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9070 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9071 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9072 HAL_PIXEL_FORMAT_RAW10,
9073 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009074 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9075 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9076 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009077
9078 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9079 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9080 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9081 count, MAX_SIZES_CNT, available_processed_sizes);
9082 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9083 available_processed_sizes, count * 2);
9084
9085 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9086 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9087 makeTable(gCamCapability[cameraId]->raw_dim,
9088 count, MAX_SIZES_CNT, available_raw_sizes);
9089 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9090 available_raw_sizes, count * 2);
9091
9092 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9093 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9094 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9095 count, MAX_SIZES_CNT, available_fps_ranges);
9096 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9097 available_fps_ranges, count * 2);
9098
9099 camera_metadata_rational exposureCompensationStep = {
9100 gCamCapability[cameraId]->exp_compensation_step.numerator,
9101 gCamCapability[cameraId]->exp_compensation_step.denominator};
9102 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9103 &exposureCompensationStep, 1);
9104
9105 Vector<uint8_t> availableVstabModes;
9106 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9107 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009108 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009109 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009110 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009111 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009112 count = IS_TYPE_MAX;
9113 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9114 for (size_t i = 0; i < count; i++) {
9115 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9116 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9117 eisSupported = true;
9118 break;
9119 }
9120 }
9121 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009122 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9123 }
9124 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9125 availableVstabModes.array(), availableVstabModes.size());
9126
9127 /*HAL 1 and HAL 3 common*/
9128 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9129 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9130 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9131    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
9132 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9133 &maxZoom, 1);
9134
9135 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9136 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9137
9138 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9139 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9140 max3aRegions[2] = 0; /* AF not supported */
9141 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9142 max3aRegions, 3);
9143
9144 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9145 memset(prop, 0, sizeof(prop));
9146 property_get("persist.camera.facedetect", prop, "1");
9147 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9148 LOGD("Support face detection mode: %d",
9149 supportedFaceDetectMode);
9150
9151 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009152    /* supported mode should be OFF if the max number of faces is 0 */
9153 if (maxFaces <= 0) {
9154 supportedFaceDetectMode = 0;
9155 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009156 Vector<uint8_t> availableFaceDetectModes;
9157 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9158 if (supportedFaceDetectMode == 1) {
9159 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9160 } else if (supportedFaceDetectMode == 2) {
9161 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9162 } else if (supportedFaceDetectMode == 3) {
9163 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9164 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9165 } else {
9166 maxFaces = 0;
9167 }
9168 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9169 availableFaceDetectModes.array(),
9170 availableFaceDetectModes.size());
9171 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9172 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009173 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9174 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9175 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009176
9177 int32_t exposureCompensationRange[] = {
9178 gCamCapability[cameraId]->exposure_compensation_min,
9179 gCamCapability[cameraId]->exposure_compensation_max};
9180 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9181 exposureCompensationRange,
9182 sizeof(exposureCompensationRange)/sizeof(int32_t));
9183
9184 uint8_t lensFacing = (facingBack) ?
9185 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9186 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9187
9188 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9189 available_thumbnail_sizes,
9190 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9191
9192 /*all sizes will be clubbed into this tag*/
9193 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9194 /*android.scaler.availableStreamConfigurations*/
9195 Vector<int32_t> available_stream_configs;
9196 cam_dimension_t active_array_dim;
9197 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9198 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009199
9200    /*advertise the list of supported input dimensions based on the property below.
9201      By default, only sizes with width or height at least that of the 5MP default
      (2592x1944) are advertised as input.
9202 Note that the setprop resolution format should be WxH.
9203 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9204 To list all supported sizes, setprop needs to be set with "0x0" */
9205 cam_dimension_t minInputSize = {2592,1944}; //5MP
9206 memset(prop, 0, sizeof(prop));
9207 property_get("persist.camera.input.minsize", prop, "2592x1944");
9208 if (strlen(prop) > 0) {
9209 char *saveptr = NULL;
9210 char *token = strtok_r(prop, "x", &saveptr);
9211 if (token != NULL) {
9212 minInputSize.width = atoi(token);
9213 }
9214 token = strtok_r(NULL, "x", &saveptr);
9215 if (token != NULL) {
9216 minInputSize.height = atoi(token);
9217 }
9218 }
9219
Thierry Strudel3d639192016-09-09 11:52:26 -07009220 /* Add input/output stream configurations for each scalar formats*/
9221 for (size_t j = 0; j < scalar_formats_count; j++) {
9222 switch (scalar_formats[j]) {
9223 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9224 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9225 case HAL_PIXEL_FORMAT_RAW10:
9226 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9227 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9228 addStreamConfig(available_stream_configs, scalar_formats[j],
9229 gCamCapability[cameraId]->raw_dim[i],
9230 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9231 }
9232 break;
9233 case HAL_PIXEL_FORMAT_BLOB:
9234 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9235 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9236 addStreamConfig(available_stream_configs, scalar_formats[j],
9237 gCamCapability[cameraId]->picture_sizes_tbl[i],
9238 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9239 }
9240 break;
9241 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9242 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9243 default:
9244 cam_dimension_t largest_picture_size;
9245 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9246 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9247 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9248 addStreamConfig(available_stream_configs, scalar_formats[j],
9249 gCamCapability[cameraId]->picture_sizes_tbl[i],
9250 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009251            /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
9252 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9253 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9254 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9255 >= minInputSize.width) || (gCamCapability[cameraId]->
9256 picture_sizes_tbl[i].height >= minInputSize.height)) {
9257 addStreamConfig(available_stream_configs, scalar_formats[j],
9258 gCamCapability[cameraId]->picture_sizes_tbl[i],
9259 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9260 }
9261 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009262 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009263
Thierry Strudel3d639192016-09-09 11:52:26 -07009264 break;
9265 }
9266 }
9267
9268 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9269 available_stream_configs.array(), available_stream_configs.size());
9270 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9271 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9272
9273 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9274 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9275
9276 /* android.scaler.availableMinFrameDurations */
9277 Vector<int64_t> available_min_durations;
9278 for (size_t j = 0; j < scalar_formats_count; j++) {
9279 switch (scalar_formats[j]) {
9280 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9281 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9282 case HAL_PIXEL_FORMAT_RAW10:
9283 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9284 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9285 available_min_durations.add(scalar_formats[j]);
9286 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9287 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9288 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9289 }
9290 break;
9291 default:
9292 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9293 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9294 available_min_durations.add(scalar_formats[j]);
9295 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9296 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9297 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9298 }
9299 break;
9300 }
9301 }
9302 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9303 available_min_durations.array(), available_min_durations.size());
9304
9305 Vector<int32_t> available_hfr_configs;
9306 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9307 int32_t fps = 0;
9308 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9309 case CAM_HFR_MODE_60FPS:
9310 fps = 60;
9311 break;
9312 case CAM_HFR_MODE_90FPS:
9313 fps = 90;
9314 break;
9315 case CAM_HFR_MODE_120FPS:
9316 fps = 120;
9317 break;
9318 case CAM_HFR_MODE_150FPS:
9319 fps = 150;
9320 break;
9321 case CAM_HFR_MODE_180FPS:
9322 fps = 180;
9323 break;
9324 case CAM_HFR_MODE_210FPS:
9325 fps = 210;
9326 break;
9327 case CAM_HFR_MODE_240FPS:
9328 fps = 240;
9329 break;
9330 case CAM_HFR_MODE_480FPS:
9331 fps = 480;
9332 break;
9333 case CAM_HFR_MODE_OFF:
9334 case CAM_HFR_MODE_MAX:
9335 default:
9336 break;
9337 }
9338
9339 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9340 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9341 /* For each HFR frame rate, need to advertise one variable fps range
9342 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9343 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9344 * set by the app. When video recording is started, [120, 120] is
9345 * set. This way sensor configuration does not change when recording
9346 * is started */
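            /* Illustrative example, assuming PREVIEW_FPS_FOR_HFR is 30: a hypothetical
             * 1920x1080 @ 120 fps HFR entry expands to the two tuples
             *   (1920, 1080,  30, 120, 4) and (1920, 1080, 120, 120, 4),
             * where 4 = 120 / PREVIEW_FPS_FOR_HFR is the maximum batch size. */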
9347
9348 /* (width, height, fps_min, fps_max, batch_size_max) */
9349 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9350 j < MAX_SIZES_CNT; j++) {
9351 available_hfr_configs.add(
9352 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9353 available_hfr_configs.add(
9354 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9355 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9356 available_hfr_configs.add(fps);
9357 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9358
9359 /* (width, height, fps_min, fps_max, batch_size_max) */
9360 available_hfr_configs.add(
9361 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9362 available_hfr_configs.add(
9363 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9364 available_hfr_configs.add(fps);
9365 available_hfr_configs.add(fps);
9366 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9367 }
9368 }
9369 }
9370 //Advertise HFR capability only if the property is set
9371 memset(prop, 0, sizeof(prop));
9372 property_get("persist.camera.hal3hfr.enable", prop, "1");
9373 uint8_t hfrEnable = (uint8_t)atoi(prop);
9374
9375 if(hfrEnable && available_hfr_configs.array()) {
9376 staticInfo.update(
9377 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9378 available_hfr_configs.array(), available_hfr_configs.size());
9379 }
9380
9381 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9382 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9383 &max_jpeg_size, 1);
9384
9385 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9386 size_t size = 0;
9387 count = CAM_EFFECT_MODE_MAX;
9388 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9389 for (size_t i = 0; i < count; i++) {
9390 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9391 gCamCapability[cameraId]->supported_effects[i]);
9392 if (NAME_NOT_FOUND != val) {
9393 avail_effects[size] = (uint8_t)val;
9394 size++;
9395 }
9396 }
9397 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9398 avail_effects,
9399 size);
9400
9401 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9402 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9403 size_t supported_scene_modes_cnt = 0;
9404 count = CAM_SCENE_MODE_MAX;
9405 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9406 for (size_t i = 0; i < count; i++) {
9407 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9408 CAM_SCENE_MODE_OFF) {
9409 int val = lookupFwkName(SCENE_MODES_MAP,
9410 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9411 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009412
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 if (NAME_NOT_FOUND != val) {
9414 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9415 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9416 supported_scene_modes_cnt++;
9417 }
9418 }
9419 }
9420 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9421 avail_scene_modes,
9422 supported_scene_modes_cnt);
9423
9424 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9425 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9426 supported_scene_modes_cnt,
9427 CAM_SCENE_MODE_MAX,
9428 scene_mode_overrides,
9429 supported_indexes,
9430 cameraId);
9431
9432 if (supported_scene_modes_cnt == 0) {
9433 supported_scene_modes_cnt = 1;
9434 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9435 }
9436
9437 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9438 scene_mode_overrides, supported_scene_modes_cnt * 3);
9439
9440 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9441 ANDROID_CONTROL_MODE_AUTO,
9442 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9443 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9444 available_control_modes,
9445 3);
9446
9447 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9448 size = 0;
9449 count = CAM_ANTIBANDING_MODE_MAX;
9450 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9451 for (size_t i = 0; i < count; i++) {
9452 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9453 gCamCapability[cameraId]->supported_antibandings[i]);
9454 if (NAME_NOT_FOUND != val) {
9455 avail_antibanding_modes[size] = (uint8_t)val;
9456 size++;
9457 }
9458
9459 }
9460 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9461 avail_antibanding_modes,
9462 size);
9463
9464 uint8_t avail_abberation_modes[] = {
9465 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9466 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9467 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9468 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9469 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9470 if (0 == count) {
9471                 // If no aberration correction modes are available for a device, advertise only the OFF mode
9472 size = 1;
9473 } else {
9474                 // If count is not zero then at least one of the FAST or HIGH_QUALITY
9475                 // modes is supported, so advertise all 3 modes as required by the
9476                 // Android M camera requirements
9477 size = 3;
9478 }
9479 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9480 avail_abberation_modes,
9481 size);
9482
9483 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9484 size = 0;
9485 count = CAM_FOCUS_MODE_MAX;
9486 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9487 for (size_t i = 0; i < count; i++) {
9488 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9489 gCamCapability[cameraId]->supported_focus_modes[i]);
9490 if (NAME_NOT_FOUND != val) {
9491 avail_af_modes[size] = (uint8_t)val;
9492 size++;
9493 }
9494 }
9495 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9496 avail_af_modes,
9497 size);
9498
9499 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9500 size = 0;
9501 count = CAM_WB_MODE_MAX;
9502 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9503 for (size_t i = 0; i < count; i++) {
9504 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9505 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9506 gCamCapability[cameraId]->supported_white_balances[i]);
9507 if (NAME_NOT_FOUND != val) {
9508 avail_awb_modes[size] = (uint8_t)val;
9509 size++;
9510 }
9511 }
9512 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9513 avail_awb_modes,
9514 size);
9515
9516 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9517 count = CAM_FLASH_FIRING_LEVEL_MAX;
9518 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9519 count);
9520 for (size_t i = 0; i < count; i++) {
9521 available_flash_levels[i] =
9522 gCamCapability[cameraId]->supported_firing_levels[i];
9523 }
9524 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9525 available_flash_levels, count);
9526
9527 uint8_t flashAvailable;
9528 if (gCamCapability[cameraId]->flash_available)
9529 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9530 else
9531 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9532 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9533 &flashAvailable, 1);
9534
9535 Vector<uint8_t> avail_ae_modes;
9536 count = CAM_AE_MODE_MAX;
9537 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9538 for (size_t i = 0; i < count; i++) {
9539 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9540 }
9541 if (flashAvailable) {
9542 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9543 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009544 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009545 }
9546 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9547 avail_ae_modes.array(),
9548 avail_ae_modes.size());
9549
9550 int32_t sensitivity_range[2];
9551 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9552 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9553 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9554 sensitivity_range,
9555 sizeof(sensitivity_range) / sizeof(int32_t));
9556
9557 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9558 &gCamCapability[cameraId]->max_analog_sensitivity,
9559 1);
9560
9561 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9562 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9563 &sensor_orientation,
9564 1);
9565
9566 int32_t max_output_streams[] = {
9567 MAX_STALLING_STREAMS,
9568 MAX_PROCESSED_STREAMS,
9569 MAX_RAW_STREAMS};
9570 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9571 max_output_streams,
9572 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9573
9574 uint8_t avail_leds = 0;
9575 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9576 &avail_leds, 0);
9577
9578 uint8_t focus_dist_calibrated;
9579 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9580 gCamCapability[cameraId]->focus_dist_calibrated);
9581 if (NAME_NOT_FOUND != val) {
9582 focus_dist_calibrated = (uint8_t)val;
9583 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9584 &focus_dist_calibrated, 1);
9585 }
9586
9587 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9588 size = 0;
9589 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9590 MAX_TEST_PATTERN_CNT);
9591 for (size_t i = 0; i < count; i++) {
9592 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9593 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9594 if (NAME_NOT_FOUND != testpatternMode) {
9595 avail_testpattern_modes[size] = testpatternMode;
9596 size++;
9597 }
9598 }
9599 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9600 avail_testpattern_modes,
9601 size);
9602
9603 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9604 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9605 &max_pipeline_depth,
9606 1);
9607
9608 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9609 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9610 &partial_result_count,
9611 1);
9612
9613 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9614 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9615
9616 Vector<uint8_t> available_capabilities;
9617 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9618 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9619 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9620 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9621 if (supportBurst) {
9622 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9623 }
9624 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9625 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9626 if (hfrEnable && available_hfr_configs.array()) {
9627 available_capabilities.add(
9628 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9629 }
9630
9631 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9632 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9633 }
9634 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9635 available_capabilities.array(),
9636 available_capabilities.size());
9637
9638     //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9639     //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9640 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9641 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9642
9643 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9644 &aeLockAvailable, 1);
9645
9646     //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9647     //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9648 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9649 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9650
9651 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9652 &awbLockAvailable, 1);
9653
9654 int32_t max_input_streams = 1;
9655 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9656 &max_input_streams,
9657 1);
9658
9659 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9660 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9661 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9662 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9663 HAL_PIXEL_FORMAT_YCbCr_420_888};
9664 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9665 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9666
9667 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9668 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9669 &max_latency,
9670 1);
9671
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009672#ifndef USE_HAL_3_3
9673 int32_t isp_sensitivity_range[2];
9674 isp_sensitivity_range[0] =
9675 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9676 isp_sensitivity_range[1] =
9677 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9678 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9679 isp_sensitivity_range,
9680 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9681#endif
9682
Thierry Strudel3d639192016-09-09 11:52:26 -07009683 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9684 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9685 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9686 available_hot_pixel_modes,
9687 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9688
9689 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9690 ANDROID_SHADING_MODE_FAST,
9691 ANDROID_SHADING_MODE_HIGH_QUALITY};
9692 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9693 available_shading_modes,
9694 3);
9695
9696 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9697 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9698 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9699 available_lens_shading_map_modes,
9700 2);
9701
9702 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9703 ANDROID_EDGE_MODE_FAST,
9704 ANDROID_EDGE_MODE_HIGH_QUALITY,
9705 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9706 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9707 available_edge_modes,
9708 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9709
9710 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9711 ANDROID_NOISE_REDUCTION_MODE_FAST,
9712 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9713 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9714 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9715 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9716 available_noise_red_modes,
9717 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9718
9719 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9720 ANDROID_TONEMAP_MODE_FAST,
9721 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9722 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9723 available_tonemap_modes,
9724 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9725
9726 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9727 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9728 available_hot_pixel_map_modes,
9729 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9730
9731 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9732 gCamCapability[cameraId]->reference_illuminant1);
9733 if (NAME_NOT_FOUND != val) {
9734 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9735 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9736 }
9737
9738 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9739 gCamCapability[cameraId]->reference_illuminant2);
9740 if (NAME_NOT_FOUND != val) {
9741 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9742 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9743 }
9744
9745 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9746 (void *)gCamCapability[cameraId]->forward_matrix1,
9747 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9748
9749 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9750 (void *)gCamCapability[cameraId]->forward_matrix2,
9751 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9752
9753 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9754 (void *)gCamCapability[cameraId]->color_transform1,
9755 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9756
9757 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9758 (void *)gCamCapability[cameraId]->color_transform2,
9759 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9760
9761 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9762 (void *)gCamCapability[cameraId]->calibration_transform1,
9763 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9764
9765 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9766 (void *)gCamCapability[cameraId]->calibration_transform2,
9767 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9768
9769 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9770 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9771 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9772 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9773 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9774 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9775 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9776 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9777 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9778 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9779 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9780 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9781 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9782 ANDROID_JPEG_GPS_COORDINATES,
9783 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9784 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9785 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9786 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9787 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9788 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9789 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9790 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9791 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9792 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009793#ifndef USE_HAL_3_3
9794 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9795#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009796 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009797 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009798 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9799 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009800 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009801 /* DevCamDebug metadata request_keys_basic */
9802 DEVCAMDEBUG_META_ENABLE,
9803 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009804 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9805 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009806 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009807
9808 size_t request_keys_cnt =
9809 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9810 Vector<int32_t> available_request_keys;
9811 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9812 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9813 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9814 }
9815
9816 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9817 available_request_keys.array(), available_request_keys.size());
9818
9819 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9820 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9821 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9822 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9823 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9824 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9825 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9826 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9827 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9828 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9829 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9830 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9831 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9832 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9833 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9834 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9835 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009836 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009837 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9838 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9839 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009840 ANDROID_STATISTICS_FACE_SCORES,
9841#ifndef USE_HAL_3_3
9842 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9843#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009844 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009845 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009846 // DevCamDebug metadata result_keys_basic
9847 DEVCAMDEBUG_META_ENABLE,
9848 // DevCamDebug metadata result_keys AF
9849 DEVCAMDEBUG_AF_LENS_POSITION,
9850 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9851 DEVCAMDEBUG_AF_TOF_DISTANCE,
9852 DEVCAMDEBUG_AF_LUMA,
9853 DEVCAMDEBUG_AF_HAF_STATE,
9854 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9855 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9856 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9857 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9858 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9859 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9860 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9861 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9862 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9863 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9864 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9865 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9866 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9867 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9868 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9869 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9870 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9871 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9872 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9873 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9874 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9875 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9876 // DevCamDebug metadata result_keys AEC
9877 DEVCAMDEBUG_AEC_TARGET_LUMA,
9878 DEVCAMDEBUG_AEC_COMP_LUMA,
9879 DEVCAMDEBUG_AEC_AVG_LUMA,
9880 DEVCAMDEBUG_AEC_CUR_LUMA,
9881 DEVCAMDEBUG_AEC_LINECOUNT,
9882 DEVCAMDEBUG_AEC_REAL_GAIN,
9883 DEVCAMDEBUG_AEC_EXP_INDEX,
9884 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009885 // DevCamDebug metadata result_keys zzHDR
9886 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9887 DEVCAMDEBUG_AEC_L_LINECOUNT,
9888 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9889 DEVCAMDEBUG_AEC_S_LINECOUNT,
9890 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9891 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9892 // DevCamDebug metadata result_keys ADRC
9893 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9894 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9895 DEVCAMDEBUG_AEC_GTM_RATIO,
9896 DEVCAMDEBUG_AEC_LTM_RATIO,
9897 DEVCAMDEBUG_AEC_LA_RATIO,
9898 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009899 // DevCamDebug metadata result_keys AWB
9900 DEVCAMDEBUG_AWB_R_GAIN,
9901 DEVCAMDEBUG_AWB_G_GAIN,
9902 DEVCAMDEBUG_AWB_B_GAIN,
9903 DEVCAMDEBUG_AWB_CCT,
9904 DEVCAMDEBUG_AWB_DECISION,
9905 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009906 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9907 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9908 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009909 };
9910
Thierry Strudel3d639192016-09-09 11:52:26 -07009911 size_t result_keys_cnt =
9912 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9913
9914 Vector<int32_t> available_result_keys;
9915 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9916 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9917 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9918 }
9919 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9920 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9921 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9922 }
9923 if (supportedFaceDetectMode == 1) {
9924 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9925 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9926 } else if ((supportedFaceDetectMode == 2) ||
9927 (supportedFaceDetectMode == 3)) {
9928 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9929 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9930 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009931#ifndef USE_HAL_3_3
9932 if (hasBlackRegions) {
9933 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9934 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9935 }
9936#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009937 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9938 available_result_keys.array(), available_result_keys.size());
9939
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009940 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009941 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9942 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9943 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9944 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9945 ANDROID_SCALER_CROPPING_TYPE,
9946 ANDROID_SYNC_MAX_LATENCY,
9947 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9948 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9949 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9950 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9951 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9952 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9953 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9954 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9955 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9956 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9957 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9958 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9959 ANDROID_LENS_FACING,
9960 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9961 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9962 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9963 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9964 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9965 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9966 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9967 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9968 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9969 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9970 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9971 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9972 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9973 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9974 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9975 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9976 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9977 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9978 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9979 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009980 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009981 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9982 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9983 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9984 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9985 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9986 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9987 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9988 ANDROID_CONTROL_AVAILABLE_MODES,
9989 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9990 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9991 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9992 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009993 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9994#ifndef USE_HAL_3_3
9995 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9996 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9997#endif
9998 };
9999
10000 Vector<int32_t> available_characteristics_keys;
10001 available_characteristics_keys.appendArray(characteristics_keys_basic,
10002 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10003#ifndef USE_HAL_3_3
10004 if (hasBlackRegions) {
10005 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10006 }
10007#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010008
10009 if (0 <= indexPD) {
10010 int32_t depthKeys[] = {
10011 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10012 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10013 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10014 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10015 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10016 };
10017 available_characteristics_keys.appendArray(depthKeys,
10018 sizeof(depthKeys) / sizeof(depthKeys[0]));
10019 }
10020
Thierry Strudel3d639192016-09-09 11:52:26 -070010021 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010022 available_characteristics_keys.array(),
10023 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010024
10025     /* Available stall durations depend on the HW + SW and will differ across devices */
10026     /* TODO: add RAW stall durations once they are implemented */
10027 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10028 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10029
10030 Vector<int64_t> available_stall_durations;
10031 for (uint32_t j = 0; j < stall_formats_count; j++) {
10032 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10033 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10034 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10035 available_stall_durations.add(stall_formats[j]);
10036 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10037 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10038 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10039 }
10040 } else {
10041 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10042 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10043 available_stall_durations.add(stall_formats[j]);
10044 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10045 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10046 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10047 }
10048 }
10049 }
10050 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10051 available_stall_durations.array(),
10052 available_stall_durations.size());
10053
10054 //QCAMERA3_OPAQUE_RAW
10055 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10056 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10057 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10058 case LEGACY_RAW:
10059 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10060 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10061 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10062 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10063 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10064 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10065 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10066 break;
10067 case MIPI_RAW:
10068 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10069 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10070 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10071 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10072 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10073 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10074 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10075 break;
10076 default:
10077 LOGE("unknown opaque_raw_format %d",
10078 gCamCapability[cameraId]->opaque_raw_fmt);
10079 break;
10080 }
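    // Illustrative example: a sensor with white_level == MAX_VALUE_10BIT (1023) and
    // opaque_raw_fmt == MIPI_RAW selects CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG and
    // advertises QCAMERA3_OPAQUE_RAW_FORMAT_MIPI.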
10081 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10082
10083 Vector<int32_t> strides;
10084 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10085 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10086 cam_stream_buf_plane_info_t buf_planes;
10087 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10088 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10089 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10090 &gCamCapability[cameraId]->padding_info, &buf_planes);
10091 strides.add(buf_planes.plane_info.mp[0].stride);
10092 }
10093 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10094 strides.size());
10095
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010096 //TBD: remove the following line once backend advertises zzHDR in feature mask
10097 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010098 //Video HDR default
10099 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10100 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010101 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010102 int32_t vhdr_mode[] = {
10103 QCAMERA3_VIDEO_HDR_MODE_OFF,
10104 QCAMERA3_VIDEO_HDR_MODE_ON};
10105
10106 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10107 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10108 vhdr_mode, vhdr_mode_count);
10109 }
10110
Thierry Strudel3d639192016-09-09 11:52:26 -070010111 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10112 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10113 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10114
10115 uint8_t isMonoOnly =
10116 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10117 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10118 &isMonoOnly, 1);
10119
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010120#ifndef USE_HAL_3_3
10121 Vector<int32_t> opaque_size;
10122 for (size_t j = 0; j < scalar_formats_count; j++) {
10123 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10124 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10125 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10126 cam_stream_buf_plane_info_t buf_planes;
10127
10128 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10129 &gCamCapability[cameraId]->padding_info, &buf_planes);
10130
10131 if (rc == 0) {
10132 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10133 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10134 opaque_size.add(buf_planes.plane_info.frame_len);
10135                 } else {
10136 LOGE("raw frame calculation failed!");
10137 }
10138 }
10139 }
10140 }
10141
10142 if ((opaque_size.size() > 0) &&
10143 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10144 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10145 else
10146         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10147#endif
10148
Thierry Strudel04e026f2016-10-10 11:27:36 -070010149 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10150 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10151 size = 0;
10152 count = CAM_IR_MODE_MAX;
10153 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10154 for (size_t i = 0; i < count; i++) {
10155 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10156 gCamCapability[cameraId]->supported_ir_modes[i]);
10157 if (NAME_NOT_FOUND != val) {
10158 avail_ir_modes[size] = (int32_t)val;
10159 size++;
10160 }
10161 }
10162 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10163 avail_ir_modes, size);
10164 }
10165
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010166 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10167 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10168 size = 0;
10169 count = CAM_AEC_CONVERGENCE_MAX;
10170 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10171 for (size_t i = 0; i < count; i++) {
10172 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10173 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10174 if (NAME_NOT_FOUND != val) {
10175 available_instant_aec_modes[size] = (int32_t)val;
10176 size++;
10177 }
10178 }
10179 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10180 available_instant_aec_modes, size);
10181 }
10182
Thierry Strudel54dc9782017-02-15 12:12:10 -080010183 int32_t sharpness_range[] = {
10184 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10185 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10186 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10187
10188 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10189 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10190 size = 0;
10191 count = CAM_BINNING_CORRECTION_MODE_MAX;
10192 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10193 for (size_t i = 0; i < count; i++) {
10194 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10195 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10196 gCamCapability[cameraId]->supported_binning_modes[i]);
10197 if (NAME_NOT_FOUND != val) {
10198 avail_binning_modes[size] = (int32_t)val;
10199 size++;
10200 }
10201 }
10202 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10203 avail_binning_modes, size);
10204 }
10205
10206 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10207 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10208 size = 0;
10209 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10210 for (size_t i = 0; i < count; i++) {
10211 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10212 gCamCapability[cameraId]->supported_aec_modes[i]);
10213 if (NAME_NOT_FOUND != val)
10214 available_aec_modes[size++] = val;
10215 }
10216 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10217 available_aec_modes, size);
10218 }
10219
10220 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10221 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10222 size = 0;
10223 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10224 for (size_t i = 0; i < count; i++) {
10225 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10226 gCamCapability[cameraId]->supported_iso_modes[i]);
10227 if (NAME_NOT_FOUND != val)
10228 available_iso_modes[size++] = val;
10229 }
10230 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10231 available_iso_modes, size);
10232 }
10233
10234 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10235     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10236 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10237 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10238 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10239
10240 int32_t available_saturation_range[4];
10241 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10242 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10243 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10244 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10245 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10246 available_saturation_range, 4);
10247
10248 uint8_t is_hdr_values[2];
10249 is_hdr_values[0] = 0;
10250 is_hdr_values[1] = 1;
10251 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10252 is_hdr_values, 2);
10253
10254 float is_hdr_confidence_range[2];
10255 is_hdr_confidence_range[0] = 0.0;
10256 is_hdr_confidence_range[1] = 1.0;
10257 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10258 is_hdr_confidence_range, 2);
10259
Emilian Peev0a972ef2017-03-16 10:25:53 +000010260 size_t eepromLength = strnlen(
10261 reinterpret_cast<const char *>(
10262 gCamCapability[cameraId]->eeprom_version_info),
10263 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10264 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010265 char easelInfo[] = ",E:N";
10266 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10267 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10268 eepromLength += sizeof(easelInfo);
10269 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10270 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010271 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10272 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10273 }
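    // Illustrative note (hypothetical string): an EEPROM version value of "V1.0" is
    // reported as "V1.0,E:Y" when an HDR+ (Easel) client was created above, or as
    // "V1.0,E:N" otherwise.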
10274
Thierry Strudel3d639192016-09-09 11:52:26 -070010275 gStaticMetadata[cameraId] = staticInfo.release();
10276 return rc;
10277}
10278
10279/*===========================================================================
10280 * FUNCTION : makeTable
10281 *
10282 * DESCRIPTION: make a table of sizes
10283 *
10284 * PARAMETERS :
10285 *
10286 *
10287 *==========================================================================*/
10288void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10289 size_t max_size, int32_t *sizeTable)
10290{
10291 size_t j = 0;
10292 if (size > max_size) {
10293 size = max_size;
10294 }
10295 for (size_t i = 0; i < size; i++) {
10296 sizeTable[j] = dimTable[i].width;
10297 sizeTable[j+1] = dimTable[i].height;
10298 j+=2;
10299 }
10300}
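// Illustrative usage: a dimTable of {{4032, 3024}, {1920, 1080}} is flattened into
// sizeTable = {4032, 3024, 1920, 1080}, i.e. width/height pairs in table order.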
10301
10302/*===========================================================================
10303 * FUNCTION : makeFPSTable
10304 *
10305 * DESCRIPTION: make a table of fps ranges
10306 *
10307 * PARAMETERS :
10308 *
10309 *==========================================================================*/
10310void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10311 size_t max_size, int32_t *fpsRangesTable)
10312{
10313 size_t j = 0;
10314 if (size > max_size) {
10315 size = max_size;
10316 }
10317 for (size_t i = 0; i < size; i++) {
10318 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10319 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10320 j+=2;
10321 }
10322}
10323
10324/*===========================================================================
10325 * FUNCTION : makeOverridesList
10326 *
10327 * DESCRIPTION: make a list of scene mode overrides
10328 *
10329 * PARAMETERS :
10330 *
10331 *
10332 *==========================================================================*/
10333void QCamera3HardwareInterface::makeOverridesList(
10334 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10335 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10336{
10337 /*daemon will give a list of overrides for all scene modes.
10338 However we should send the fwk only the overrides for the scene modes
10339 supported by the framework*/
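    /* Illustrative layout (hypothetical entry): each supported scene mode contributes an
     * (aeMode, awbMode, afMode) triplet to overridesList, e.g. a backend override of AUTO
     * white balance and continuous-picture AF on a flash-capable camera becomes
     * (ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
     *  ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE). */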
10340 size_t j = 0;
10341 if (size > max_size) {
10342 size = max_size;
10343 }
10344 size_t focus_count = CAM_FOCUS_MODE_MAX;
10345 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10346 focus_count);
10347 for (size_t i = 0; i < size; i++) {
10348 bool supt = false;
10349 size_t index = supported_indexes[i];
10350 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10351 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10352 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10353 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10354 overridesTable[index].awb_mode);
10355 if (NAME_NOT_FOUND != val) {
10356 overridesList[j+1] = (uint8_t)val;
10357 }
10358 uint8_t focus_override = overridesTable[index].af_mode;
10359 for (size_t k = 0; k < focus_count; k++) {
10360 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10361 supt = true;
10362 break;
10363 }
10364 }
10365 if (supt) {
10366 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10367 focus_override);
10368 if (NAME_NOT_FOUND != val) {
10369 overridesList[j+2] = (uint8_t)val;
10370 }
10371 } else {
10372 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10373 }
10374 j+=3;
10375 }
10376}
10377
10378/*===========================================================================
10379 * FUNCTION : filterJpegSizes
10380 *
10381 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are at
10382 *              least as large as the active array size divided by the max downscale factor
10383 *
10384 * PARAMETERS :
10385 *
10386 * RETURN : length of jpegSizes array
10387 *==========================================================================*/
10388
10389size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10390 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10391 uint8_t downscale_factor)
10392{
10393 if (0 == downscale_factor) {
10394 downscale_factor = 1;
10395 }
10396
10397 int32_t min_width = active_array_size.width / downscale_factor;
10398 int32_t min_height = active_array_size.height / downscale_factor;
10399 size_t jpegSizesCnt = 0;
10400 if (processedSizesCnt > maxCount) {
10401 processedSizesCnt = maxCount;
10402 }
10403 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10404 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10405 jpegSizes[jpegSizesCnt] = processedSizes[i];
10406 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10407 jpegSizesCnt += 2;
10408 }
10409 }
10410 return jpegSizesCnt;
10411}
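/* Illustrative example (hypothetical numbers): with an active_array_size of 4000x3000 and
 * downscale_factor = 4, min_width/min_height become 1000/750, so a 640x480 processed size
 * is filtered out of the JPEG list while 1920x1080 is kept. */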
10412
10413/*===========================================================================
10414 * FUNCTION : computeNoiseModelEntryS
10415 *
10416 * DESCRIPTION: function to map a given sensitivity to the S noise
10417 * model parameters in the DNG noise model.
10418 *
10419 * PARAMETERS : sens : the sensor sensitivity
10420 *
10421 * RETURN     : S (sensor amplification) noise
10422 *
10423 *==========================================================================*/
10424double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10425 double s = gCamCapability[mCameraId]->gradient_S * sens +
10426 gCamCapability[mCameraId]->offset_S;
10427 return ((s < 0.0) ? 0.0 : s);
10428}
10429
10430/*===========================================================================
10431 * FUNCTION : computeNoiseModelEntryO
10432 *
10433 * DESCRIPTION: function to map a given sensitivity to the O noise
10434 * model parameters in the DNG noise model.
10435 *
10436 * PARAMETERS : sens : the sensor sensitivity
10437 *
10438 * RETURN     : O (sensor readout) noise
10439 *
10440 *==========================================================================*/
10441double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10442 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10443 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10444 1.0 : (1.0 * sens / max_analog_sens);
10445 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10446 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10447 return ((o < 0.0) ? 0.0 : o);
10448}
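/* Illustrative note: the S and O values computed above are the per-sensitivity
 * coefficients of the DNG noise model used for ANDROID_SENSOR_NOISE_PROFILE, which
 * approximates the noise of a normalized pixel value x as N(x) = sqrt(S * x + O).
 * For example, hypothetical tuning values gradient_S = 3e-6, offset_S = 1e-6 and
 * sens = 100 give S = 3e-6 * 100 + 1e-6 = 3.01e-4. */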
10449
10450/*===========================================================================
10451 * FUNCTION : getSensorSensitivity
10452 *
10453 * DESCRIPTION: convert iso_mode to an integer value
10454 *
10455 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10456 *
10457 * RETURN     : sensitivity supported by sensor
10458 *
10459 *==========================================================================*/
10460int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10461{
10462 int32_t sensitivity;
10463
10464 switch (iso_mode) {
10465 case CAM_ISO_MODE_100:
10466 sensitivity = 100;
10467 break;
10468 case CAM_ISO_MODE_200:
10469 sensitivity = 200;
10470 break;
10471 case CAM_ISO_MODE_400:
10472 sensitivity = 400;
10473 break;
10474 case CAM_ISO_MODE_800:
10475 sensitivity = 800;
10476 break;
10477 case CAM_ISO_MODE_1600:
10478 sensitivity = 1600;
10479 break;
10480 default:
10481 sensitivity = -1;
10482 break;
10483 }
10484 return sensitivity;
10485}
10486
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010487int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10488 if (gHdrPlusClient != nullptr) {
10489 return OK;
10490 }
10491
10492 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10493 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010494 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10495 // to connect to Easel.
10496 bool doNotpowerOnEasel =
10497 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10498
10499 if (doNotpowerOnEasel) {
10500 gHdrPlusClient = nullptr;
10501 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10502 return OK;
10503 }
10504
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010505 // If Easel is present, power on Easel and suspend it immediately.
10506 status_t res = gHdrPlusClient->powerOnEasel();
10507 if (res != OK) {
10508 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10509 gHdrPlusClient = nullptr;
10510 return res;
10511 }
10512
10513 res = gHdrPlusClient->suspendEasel();
10514 if (res != OK) {
10515 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10516 }
10517
10518 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10519 } else {
10520 // Destroy HDR+ client if Easel isn't present.
10521 gHdrPlusClient = nullptr;
10522 }
10523
10524 return OK;
10525}
10526
Thierry Strudel3d639192016-09-09 11:52:26 -070010527/*===========================================================================
10528 * FUNCTION : getCamInfo
10529 *
10530 * DESCRIPTION: query camera capabilities
10531 *
10532 * PARAMETERS :
10533 * @cameraId : camera Id
10534 * @info : camera info struct to be filled in with camera capabilities
10535 *
10536 * RETURN : int type of status
10537 * NO_ERROR -- success
10538 * none-zero failure code
10539 *              non-zero failure code
10540int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10541 struct camera_info *info)
10542{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010543 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010544 int rc = 0;
10545
10546 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010547
10548 rc = initHdrPlusClientLocked();
10549 if (rc != OK) {
10550 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10551 pthread_mutex_unlock(&gCamLock);
10552 return rc;
10553 }
10554
Thierry Strudel3d639192016-09-09 11:52:26 -070010555 if (NULL == gCamCapability[cameraId]) {
10556 rc = initCapabilities(cameraId);
10557 if (rc < 0) {
10558 pthread_mutex_unlock(&gCamLock);
10559 return rc;
10560 }
10561 }
10562
10563 if (NULL == gStaticMetadata[cameraId]) {
10564 rc = initStaticMetadata(cameraId);
10565 if (rc < 0) {
10566 pthread_mutex_unlock(&gCamLock);
10567 return rc;
10568 }
10569 }
10570
10571 switch(gCamCapability[cameraId]->position) {
10572 case CAM_POSITION_BACK:
10573 case CAM_POSITION_BACK_AUX:
10574 info->facing = CAMERA_FACING_BACK;
10575 break;
10576
10577 case CAM_POSITION_FRONT:
10578 case CAM_POSITION_FRONT_AUX:
10579 info->facing = CAMERA_FACING_FRONT;
10580 break;
10581
10582 default:
10583 LOGE("Unknown position type %d for camera id:%d",
10584 gCamCapability[cameraId]->position, cameraId);
10585 rc = -1;
10586 break;
10587 }
10588
10589
10590 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010591#ifndef USE_HAL_3_3
10592 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10593#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010594 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010595#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010596 info->static_camera_characteristics = gStaticMetadata[cameraId];
10597
10598 //For now assume both cameras can operate independently.
10599 info->conflicting_devices = NULL;
10600 info->conflicting_devices_length = 0;
10601
10602 //resource cost is 100 * MIN(1.0, m/M),
10603 //where m is throughput requirement with maximum stream configuration
10604 //and M is CPP maximum throughput.
10605 float max_fps = 0.0;
10606 for (uint32_t i = 0;
10607 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10608 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10609 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10610 }
10611 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10612 gCamCapability[cameraId]->active_array_size.width *
10613 gCamCapability[cameraId]->active_array_size.height * max_fps /
10614 gCamCapability[cameraId]->max_pixel_bandwidth;
10615 info->resource_cost = 100 * MIN(1.0, ratio);
10616 LOGI("camera %d resource cost is %d", cameraId,
10617 info->resource_cost);
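    // Worked example of the cost formula above (numbers are illustrative, not this
    // sensor's real capabilities): with MAX_PROCESSED_STREAMS = 3, a 4032x3024
    // active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9 pixels/s,
    // ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so resource_cost ~= 91.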
10618
10619 pthread_mutex_unlock(&gCamLock);
10620 return rc;
10621}
10622
10623/*===========================================================================
10624 * FUNCTION : translateCapabilityToMetadata
10625 *
10626 * DESCRIPTION: translate the capability into camera_metadata_t
10627 *
10628 * PARAMETERS : type of the request
10629 *
10630 *
10631 * RETURN : success: camera_metadata_t*
10632 * failure: NULL
10633 *
10634 *==========================================================================*/
10635camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10636{
10637 if (mDefaultMetadata[type] != NULL) {
10638 return mDefaultMetadata[type];
10639 }
10640 //first time we are handling this request
10641 //fill up the metadata structure using the wrapper class
10642 CameraMetadata settings;
10643 //translate from cam_capability_t to camera_metadata_tag_t
10644 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10645 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10646 int32_t defaultRequestID = 0;
10647 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10648
10649 /* OIS disable */
10650 char ois_prop[PROPERTY_VALUE_MAX];
10651 memset(ois_prop, 0, sizeof(ois_prop));
10652 property_get("persist.camera.ois.disable", ois_prop, "0");
10653 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10654
10655 /* Force video to use OIS */
10656 char videoOisProp[PROPERTY_VALUE_MAX];
10657 memset(videoOisProp, 0, sizeof(videoOisProp));
10658 property_get("persist.camera.ois.video", videoOisProp, "1");
10659 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010660
10661 // Hybrid AE enable/disable
10662 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10663 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10664 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10665 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
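    // The three debug properties above can be overridden for tuning/testing, e.g.
    // (illustrative commands; the values are plain integers parsed with atoi()):
    //   adb shell setprop persist.camera.ois.disable 1        # force OIS off in the default templates
    //   adb shell setprop persist.camera.ois.video 0          # stop forcing OIS on for video templates
    //   adb shell setprop persist.camera.hybrid_ae.enable 1   # default hybrid AE to enabled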
10666
Thierry Strudel3d639192016-09-09 11:52:26 -070010667 uint8_t controlIntent = 0;
10668 uint8_t focusMode;
10669 uint8_t vsMode;
10670 uint8_t optStabMode;
10671 uint8_t cacMode;
10672 uint8_t edge_mode;
10673 uint8_t noise_red_mode;
10674 uint8_t tonemap_mode;
10675 bool highQualityModeEntryAvailable = FALSE;
10676 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010677 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010678 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10679 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010680 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010681
Thierry Strudel3d639192016-09-09 11:52:26 -070010682 switch (type) {
10683 case CAMERA3_TEMPLATE_PREVIEW:
10684 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10685 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10686 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10687 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10688 edge_mode = ANDROID_EDGE_MODE_FAST;
10689 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10690 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10691 break;
10692 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10693 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10694 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10695 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10696 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10697 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10698 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10699 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10700 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10701 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10702 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10703 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10704 highQualityModeEntryAvailable = TRUE;
10705 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10706 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10707 fastModeEntryAvailable = TRUE;
10708 }
10709 }
10710 if (highQualityModeEntryAvailable) {
10711 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10712 } else if (fastModeEntryAvailable) {
10713 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10714 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010715 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10716 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10717 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010718 break;
10719 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10720 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10721 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10722 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010723 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10724 edge_mode = ANDROID_EDGE_MODE_FAST;
10725 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10726 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10727 if (forceVideoOis)
10728 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10729 break;
10730 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10731 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10732 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10733 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010734 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10735 edge_mode = ANDROID_EDGE_MODE_FAST;
10736 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10737 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10738 if (forceVideoOis)
10739 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10740 break;
10741 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10742 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10743 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10744 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10745 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10746 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10747 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10748 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10749 break;
10750 case CAMERA3_TEMPLATE_MANUAL:
10751 edge_mode = ANDROID_EDGE_MODE_FAST;
10752 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10753 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10754 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10755 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10756 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10757 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10758 break;
10759 default:
10760 edge_mode = ANDROID_EDGE_MODE_FAST;
10761 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10762 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10763 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10764 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10765 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10766 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10767 break;
10768 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010769    // Set CAC to OFF if the underlying device doesn't support it
10770 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10771 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10772 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010773 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10774 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10775 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10776 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10777 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10778 }
10779 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010780 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010781
10782 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10783 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10784 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10785 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10786 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10787 || ois_disable)
10788 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10789 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010790 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010791
10792 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10793 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10794
10795 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10796 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10797
10798 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10799 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10800
10801 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10802 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10803
10804 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10805 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10806
10807 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10808 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10809
10810 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10811 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10812
10813 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10814 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10815
10816 /*flash*/
10817 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10818 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10819
10820 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10821 settings.update(ANDROID_FLASH_FIRING_POWER,
10822 &flashFiringLevel, 1);
10823
10824 /* lens */
10825 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10826 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10827
10828 if (gCamCapability[mCameraId]->filter_densities_count) {
10829 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10830 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10831 gCamCapability[mCameraId]->filter_densities_count);
10832 }
10833
10834 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10835 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10836
Thierry Strudel3d639192016-09-09 11:52:26 -070010837 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10838 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10839
10840 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10841 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10842
10843 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10844 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10845
10846 /* face detection (default to OFF) */
10847 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10848 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10849
Thierry Strudel54dc9782017-02-15 12:12:10 -080010850 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10851 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010852
10853 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10854 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10855
10856 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10857 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10858
Thierry Strudel3d639192016-09-09 11:52:26 -070010859
10860 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10861 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10862
10863 /* Exposure time(Update the Min Exposure Time)*/
10864 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10865 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10866
10867 /* frame duration */
10868 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10869 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10870
10871 /* sensitivity */
10872 static const int32_t default_sensitivity = 100;
10873 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010874#ifndef USE_HAL_3_3
10875 static const int32_t default_isp_sensitivity =
10876 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10877 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10878#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010879
10880 /*edge mode*/
10881 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10882
10883 /*noise reduction mode*/
10884 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10885
10886 /*color correction mode*/
10887 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10888 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10889
10890 /*transform matrix mode*/
10891 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10892
10893 int32_t scaler_crop_region[4];
10894 scaler_crop_region[0] = 0;
10895 scaler_crop_region[1] = 0;
10896 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10897 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10898 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10899
10900 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10901 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10902
10903 /*focus distance*/
10904 float focus_distance = 0.0;
10905 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10906
10907 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010908 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010909 float max_range = 0.0;
10910 float max_fixed_fps = 0.0;
10911 int32_t fps_range[2] = {0, 0};
10912 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10913 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010914 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10915 TEMPLATE_MAX_PREVIEW_FPS) {
10916 continue;
10917 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010918 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10919 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10920 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10921 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10922 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10923 if (range > max_range) {
10924 fps_range[0] =
10925 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10926 fps_range[1] =
10927 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10928 max_range = range;
10929 }
10930 } else {
10931 if (range < 0.01 && max_fixed_fps <
10932 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10933 fps_range[0] =
10934 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10935 fps_range[1] =
10936 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10937 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10938 }
10939 }
10940 }
10941 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
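    // Worked example of the selection above (fps table values are assumptions):
    // with fps_ranges_tbl = {[15,30], [30,30], [7.5,60], [60,60]} and ranges above
    // TEMPLATE_MAX_PREVIEW_FPS filtered out, the 60 fps entries are skipped;
    // PREVIEW/STILL_CAPTURE/ZSL templates pick the widest remaining range [15,30],
    // while VIDEO and the other templates pick the highest fixed range [30,30].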
10942
10943 /*precapture trigger*/
10944 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10945 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10946
10947 /*af trigger*/
10948 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10949 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10950
10951 /* ae & af regions */
10952 int32_t active_region[] = {
10953 gCamCapability[mCameraId]->active_array_size.left,
10954 gCamCapability[mCameraId]->active_array_size.top,
10955 gCamCapability[mCameraId]->active_array_size.left +
10956 gCamCapability[mCameraId]->active_array_size.width,
10957 gCamCapability[mCameraId]->active_array_size.top +
10958 gCamCapability[mCameraId]->active_array_size.height,
10959 0};
10960 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10961 sizeof(active_region) / sizeof(active_region[0]));
10962 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10963 sizeof(active_region) / sizeof(active_region[0]));
10964
10965 /* black level lock */
10966 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10967 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10968
Thierry Strudel3d639192016-09-09 11:52:26 -070010969 //special defaults for manual template
10970 if (type == CAMERA3_TEMPLATE_MANUAL) {
10971 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10972 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10973
10974 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10975 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10976
10977 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10978 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10979
10980 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10981 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10982
10983 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10984 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10985
10986 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10987 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10988 }
10989
10990
10991 /* TNR
10992     * We'll use this location to determine for which templates TNR will be set.
10993     * TNR is enabled if either the preview or the video stream requires it.
10994     * This is not to be confused with per-stream linking; that decision is still
10995     * made per session and is handled as part of stream configuration.
10996 */
10997 uint8_t tnr_enable = 0;
10998
10999 if (m_bTnrPreview || m_bTnrVideo) {
11000
11001 switch (type) {
11002 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11003 tnr_enable = 1;
11004 break;
11005
11006 default:
11007 tnr_enable = 0;
11008 break;
11009 }
11010
11011 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11012 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11013 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11014
11015 LOGD("TNR:%d with process plate %d for template:%d",
11016 tnr_enable, tnr_process_type, type);
11017 }
11018
11019 //Update Link tags to default
11020 int32_t sync_type = CAM_TYPE_STANDALONE;
11021 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11022
11023    int32_t is_main = 0; // this doesn't matter as the app is expected to overwrite it
11024 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11025
11026 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11027
11028 /* CDS default */
11029 char prop[PROPERTY_VALUE_MAX];
11030 memset(prop, 0, sizeof(prop));
11031 property_get("persist.camera.CDS", prop, "Auto");
11032 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11033 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11034 if (CAM_CDS_MODE_MAX == cds_mode) {
11035 cds_mode = CAM_CDS_MODE_AUTO;
11036 }
11037
11038    /* Disabling CDS in templates which have TNR enabled */
11039 if (tnr_enable)
11040 cds_mode = CAM_CDS_MODE_OFF;
11041
11042 int32_t mode = cds_mode;
11043 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
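    // Example override (assumes CDS_MAP accepts the strings "Off", "On" and "Auto",
    // matching the "Auto" default above):
    //   adb shell setprop persist.camera.CDS Off
    // Unrecognized values resolve to CAM_CDS_MODE_MAX and fall back to CAM_CDS_MODE_AUTO.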
Thierry Strudel04e026f2016-10-10 11:27:36 -070011044
Thierry Strudel269c81a2016-10-12 12:13:59 -070011045 /* Manual Convergence AEC Speed is disabled by default*/
11046 float default_aec_speed = 0;
11047 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11048
11049 /* Manual Convergence AWB Speed is disabled by default*/
11050 float default_awb_speed = 0;
11051 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11052
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011053 // Set instant AEC to normal convergence by default
11054 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11055 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11056
Shuzhen Wang19463d72016-03-08 11:09:52 -080011057 /* hybrid ae */
11058 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11059
Thierry Strudel3d639192016-09-09 11:52:26 -070011060 mDefaultMetadata[type] = settings.release();
11061
11062 return mDefaultMetadata[type];
11063}
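// Usage sketch (hypothetical caller; the framework-facing wrapper in this HAL may
// differ): defaults for a template are built once and then served from the cache.
//   camera_metadata_t *previewDefaults =
//           hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
//   // A second call with the same type returns the cached mDefaultMetadata[type].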
11064
11065/*===========================================================================
11066 * FUNCTION : setFrameParameters
11067 *
11068 * DESCRIPTION: set parameters per frame as requested in the metadata from
11069 * framework
11070 *
11071 * PARAMETERS :
11072 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011073 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011074 * @blob_request: Whether this request is a blob request or not
11075 *
11076 * RETURN : success: NO_ERROR
11077 *              failure: non-zero error code
11078 *==========================================================================*/
11079int QCamera3HardwareInterface::setFrameParameters(
11080 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011081 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011082 int blob_request,
11083 uint32_t snapshotStreamId)
11084{
11085 /*translate from camera_metadata_t type to parm_type_t*/
11086 int rc = 0;
11087 int32_t hal_version = CAM_HAL_V3;
11088
11089 clear_metadata_buffer(mParameters);
11090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11091 LOGE("Failed to set hal version in the parameters");
11092 return BAD_VALUE;
11093 }
11094
11095 /*we need to update the frame number in the parameters*/
11096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11097 request->frame_number)) {
11098 LOGE("Failed to set the frame number in the parameters");
11099 return BAD_VALUE;
11100 }
11101
11102 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011104 LOGE("Failed to set stream type mask in the parameters");
11105 return BAD_VALUE;
11106 }
11107
11108 if (mUpdateDebugLevel) {
11109 uint32_t dummyDebugLevel = 0;
11110        /* The value of dummyDebugLevel is irrelevant. On
11111 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11113 dummyDebugLevel)) {
11114 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11115 return BAD_VALUE;
11116 }
11117 mUpdateDebugLevel = false;
11118 }
11119
11120 if(request->settings != NULL){
11121 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11122 if (blob_request)
11123 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11124 }
11125
11126 return rc;
11127}
11128
11129/*===========================================================================
11130 * FUNCTION : setReprocParameters
11131 *
11132 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11133 * return it.
11134 *
11135 * PARAMETERS :
11136 * @request : request that needs to be serviced
11137 *
11138 * RETURN : success: NO_ERROR
11139 *              failure: non-zero error code
11140 *==========================================================================*/
11141int32_t QCamera3HardwareInterface::setReprocParameters(
11142 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11143 uint32_t snapshotStreamId)
11144{
11145 /*translate from camera_metadata_t type to parm_type_t*/
11146 int rc = 0;
11147
11148 if (NULL == request->settings){
11149 LOGE("Reprocess settings cannot be NULL");
11150 return BAD_VALUE;
11151 }
11152
11153 if (NULL == reprocParam) {
11154 LOGE("Invalid reprocessing metadata buffer");
11155 return BAD_VALUE;
11156 }
11157 clear_metadata_buffer(reprocParam);
11158
11159 /*we need to update the frame number in the parameters*/
11160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11161 request->frame_number)) {
11162 LOGE("Failed to set the frame number in the parameters");
11163 return BAD_VALUE;
11164 }
11165
11166 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11167 if (rc < 0) {
11168 LOGE("Failed to translate reproc request");
11169 return rc;
11170 }
11171
11172 CameraMetadata frame_settings;
11173 frame_settings = request->settings;
11174 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11175 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11176 int32_t *crop_count =
11177 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11178 int32_t *crop_data =
11179 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11180 int32_t *roi_map =
11181 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11182 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11183 cam_crop_data_t crop_meta;
11184 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11185 crop_meta.num_of_streams = 1;
11186 crop_meta.crop_info[0].crop.left = crop_data[0];
11187 crop_meta.crop_info[0].crop.top = crop_data[1];
11188 crop_meta.crop_info[0].crop.width = crop_data[2];
11189 crop_meta.crop_info[0].crop.height = crop_data[3];
11190
11191 crop_meta.crop_info[0].roi_map.left =
11192 roi_map[0];
11193 crop_meta.crop_info[0].roi_map.top =
11194 roi_map[1];
11195 crop_meta.crop_info[0].roi_map.width =
11196 roi_map[2];
11197 crop_meta.crop_info[0].roi_map.height =
11198 roi_map[3];
11199
11200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11201 rc = BAD_VALUE;
11202 }
11203 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11204 request->input_buffer->stream,
11205 crop_meta.crop_info[0].crop.left,
11206 crop_meta.crop_info[0].crop.top,
11207 crop_meta.crop_info[0].crop.width,
11208 crop_meta.crop_info[0].crop.height);
11209 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11210 request->input_buffer->stream,
11211 crop_meta.crop_info[0].roi_map.left,
11212 crop_meta.crop_info[0].roi_map.top,
11213 crop_meta.crop_info[0].roi_map.width,
11214 crop_meta.crop_info[0].roi_map.height);
11215 } else {
11216 LOGE("Invalid reprocess crop count %d!", *crop_count);
11217 }
11218 } else {
11219 LOGE("No crop data from matching output stream");
11220 }
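    // Illustrative reprocess settings for the block above (all values are made up):
    //   QCAMERA3_CROP_COUNT_REPROCESS   = 1
    //   QCAMERA3_CROP_REPROCESS         = {0, 0, 3840, 2160}   // left, top, width, height
    //   QCAMERA3_CROP_ROI_MAP_REPROCESS = {0, 0, 4032, 3024}
    // These get repacked into CAM_INTF_META_CROP_DATA for the reprocess stream.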
11221
11222 /* These settings are not needed for regular requests so handle them specially for
11223 reprocess requests; information needed for EXIF tags */
11224 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11225 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11226 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11227 if (NAME_NOT_FOUND != val) {
11228 uint32_t flashMode = (uint32_t)val;
11229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11230 rc = BAD_VALUE;
11231 }
11232 } else {
11233 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11234 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11235 }
11236 } else {
11237 LOGH("No flash mode in reprocess settings");
11238 }
11239
11240 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11241 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11243 rc = BAD_VALUE;
11244 }
11245 } else {
11246 LOGH("No flash state in reprocess settings");
11247 }
11248
11249 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11250 uint8_t *reprocessFlags =
11251 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11253 *reprocessFlags)) {
11254 rc = BAD_VALUE;
11255 }
11256 }
11257
Thierry Strudel54dc9782017-02-15 12:12:10 -080011258 // Add exif debug data to internal metadata
11259 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11260 mm_jpeg_debug_exif_params_t *debug_params =
11261 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11262 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11263 // AE
11264 if (debug_params->ae_debug_params_valid == TRUE) {
11265 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11266 debug_params->ae_debug_params);
11267 }
11268 // AWB
11269 if (debug_params->awb_debug_params_valid == TRUE) {
11270 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11271 debug_params->awb_debug_params);
11272 }
11273 // AF
11274 if (debug_params->af_debug_params_valid == TRUE) {
11275 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11276 debug_params->af_debug_params);
11277 }
11278 // ASD
11279 if (debug_params->asd_debug_params_valid == TRUE) {
11280 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11281 debug_params->asd_debug_params);
11282 }
11283 // Stats
11284 if (debug_params->stats_debug_params_valid == TRUE) {
11285 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11286 debug_params->stats_debug_params);
11287 }
11288 // BE Stats
11289 if (debug_params->bestats_debug_params_valid == TRUE) {
11290 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11291 debug_params->bestats_debug_params);
11292 }
11293 // BHIST
11294 if (debug_params->bhist_debug_params_valid == TRUE) {
11295 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11296 debug_params->bhist_debug_params);
11297 }
11298 // 3A Tuning
11299 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11300 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11301 debug_params->q3a_tuning_debug_params);
11302 }
11303 }
11304
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011305 // Add metadata which reprocess needs
11306 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11307 cam_reprocess_info_t *repro_info =
11308 (cam_reprocess_info_t *)frame_settings.find
11309 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011310 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011311 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011312 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011313 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011314 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011315 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011316 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011317 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011318 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011319 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011320 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011321 repro_info->pipeline_flip);
11322 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11323 repro_info->af_roi);
11324 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11325 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011326        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11327           CAM_INTF_PARM_ROTATION metadata has already been added in
11328           translateToHalMetadata and HAL needs to keep this new rotation
11329           metadata. Otherwise, the old rotation info saved in the vendor tag
11330           is used. */
11331 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11332 CAM_INTF_PARM_ROTATION, reprocParam) {
11333 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11334 } else {
11335 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011336 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011337 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011338 }
11339
11340    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11341 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
11342 roi.width and roi.height would be the final JPEG size.
11343       For now, HAL only checks this for reprocess requests. */
11344 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11345 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11346 uint8_t *enable =
11347 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11348 if (*enable == TRUE) {
11349 int32_t *crop_data =
11350 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11351 cam_stream_crop_info_t crop_meta;
11352 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11353 crop_meta.stream_id = 0;
11354 crop_meta.crop.left = crop_data[0];
11355 crop_meta.crop.top = crop_data[1];
11356 crop_meta.crop.width = crop_data[2];
11357 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011358 // The JPEG crop roi should match cpp output size
11359 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11360 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11361 crop_meta.roi_map.left = 0;
11362 crop_meta.roi_map.top = 0;
11363 crop_meta.roi_map.width = cpp_crop->crop.width;
11364 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011365 }
11366 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11367 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011368 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011369 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011370 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11371 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011372 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011373 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11374
11375 // Add JPEG scale information
11376 cam_dimension_t scale_dim;
11377 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11378 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11379 int32_t *roi =
11380 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11381 scale_dim.width = roi[2];
11382 scale_dim.height = roi[3];
11383 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11384 scale_dim);
11385 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11386 scale_dim.width, scale_dim.height, mCameraId);
11387 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011388 }
11389 }
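    // Illustrative reprocess request for the block above (values are made up): to
    // crop and then downscale during HW JPEG encoding, an app could attach
    //   QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1
    //   QCAMERA3_JPEG_ENCODE_CROP_RECT   = {0, 0, 4000, 3000}  // crop in CPP output coordinates
    //   QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1920, 1080}  // final JPEG dimensions
    // which map to CAM_INTF_PARM_JPEG_ENCODE_CROP and CAM_INTF_PARM_JPEG_SCALE_DIMENSION.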
11390
11391 return rc;
11392}
11393
11394/*===========================================================================
11395 * FUNCTION : saveRequestSettings
11396 *
11397 * DESCRIPTION: Add any settings that might have changed to the request settings
11398 * and save the settings to be applied on the frame
11399 *
11400 * PARAMETERS :
11401 * @jpegMetadata : the extracted and/or modified jpeg metadata
11402 * @request : request with initial settings
11403 *
11404 * RETURN :
11405 * camera_metadata_t* : pointer to the saved request settings
11406 *==========================================================================*/
11407camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11408 const CameraMetadata &jpegMetadata,
11409 camera3_capture_request_t *request)
11410{
11411 camera_metadata_t *resultMetadata;
11412 CameraMetadata camMetadata;
11413 camMetadata = request->settings;
11414
11415 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11416 int32_t thumbnail_size[2];
11417 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11418 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11419 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11420 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11421 }
11422
11423 if (request->input_buffer != NULL) {
11424 uint8_t reprocessFlags = 1;
11425 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11426 (uint8_t*)&reprocessFlags,
11427 sizeof(reprocessFlags));
11428 }
11429
11430 resultMetadata = camMetadata.release();
11431 return resultMetadata;
11432}
11433
11434/*===========================================================================
11435 * FUNCTION : setHalFpsRange
11436 *
11437 * DESCRIPTION: set FPS range parameter
11438 *
11439 *
11440 * PARAMETERS :
11441 * @settings : Metadata from framework
11442 * @hal_metadata: Metadata buffer
11443 *
11444 *
11445 * RETURN : success: NO_ERROR
11446 *              failure: non-zero error code
11447 *==========================================================================*/
11448int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11449 metadata_buffer_t *hal_metadata)
11450{
11451 int32_t rc = NO_ERROR;
11452 cam_fps_range_t fps_range;
11453 fps_range.min_fps = (float)
11454 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11455 fps_range.max_fps = (float)
11456 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11457 fps_range.video_min_fps = fps_range.min_fps;
11458 fps_range.video_max_fps = fps_range.max_fps;
11459
11460 LOGD("aeTargetFpsRange fps: [%f %f]",
11461 fps_range.min_fps, fps_range.max_fps);
11462 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11463 * follows:
11464 * ---------------------------------------------------------------|
11465 * Video stream is absent in configure_streams |
11466     * (Camcorder preview before the first video record)      |
11467 * ---------------------------------------------------------------|
11468 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11469 * | | | vid_min/max_fps|
11470 * ---------------------------------------------------------------|
11471 * NO | [ 30, 240] | 240 | [240, 240] |
11472 * |-------------|-------------|----------------|
11473 * | [240, 240] | 240 | [240, 240] |
11474 * ---------------------------------------------------------------|
11475 * Video stream is present in configure_streams |
11476 * ---------------------------------------------------------------|
11477 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11478 * | | | vid_min/max_fps|
11479 * ---------------------------------------------------------------|
11480 * NO | [ 30, 240] | 240 | [240, 240] |
11481 * (camcorder prev |-------------|-------------|----------------|
11482 * after video rec | [240, 240] | 240 | [240, 240] |
11483 * is stopped) | | | |
11484 * ---------------------------------------------------------------|
11485 * YES | [ 30, 240] | 240 | [240, 240] |
11486 * |-------------|-------------|----------------|
11487 * | [240, 240] | 240 | [240, 240] |
11488 * ---------------------------------------------------------------|
11489 * When Video stream is absent in configure_streams,
11490 * preview fps = sensor_fps / batchsize
11491 * Eg: for 240fps at batchSize 4, preview = 60fps
11492 * for 120fps at batchSize 4, preview = 30fps
11493 *
11494 * When video stream is present in configure_streams, preview fps is as per
11495 * the ratio of preview buffers to video buffers requested in process
11496 * capture request
11497 */
11498 mBatchSize = 0;
11499 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11500 fps_range.min_fps = fps_range.video_max_fps;
11501 fps_range.video_min_fps = fps_range.video_max_fps;
11502 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11503 fps_range.max_fps);
11504 if (NAME_NOT_FOUND != val) {
11505 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11507 return BAD_VALUE;
11508 }
11509
11510 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11511 /* If batchmode is currently in progress and the fps changes,
11512 * set the flag to restart the sensor */
11513 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11514 (mHFRVideoFps != fps_range.max_fps)) {
11515 mNeedSensorRestart = true;
11516 }
11517 mHFRVideoFps = fps_range.max_fps;
11518 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11519 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11520 mBatchSize = MAX_HFR_BATCH_SIZE;
11521 }
11522 }
11523 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11524
11525 }
11526 } else {
11527        /* HFR mode is a session parameter in the backend/ISP. It should be reset when
11528 * in non-HFR mode */
11529 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11531 return BAD_VALUE;
11532 }
11533 }
11534 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11535 return BAD_VALUE;
11536 }
11537 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11538 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11539 return rc;
11540}
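// Worked example for the HFR path above (assumes PREVIEW_FPS_FOR_HFR == 30 and
// MAX_HFR_BATCH_SIZE >= 8; check the actual defines): in constrained high-speed
// mode with aeTargetFpsRange = [240, 240], mHFRVideoFps becomes 240 and
// mBatchSize = 240 / 30 = 8, so preview effectively runs at 240 / 8 = 30 fps as
// described in the table above.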
11541
11542/*===========================================================================
11543 * FUNCTION : translateToHalMetadata
11544 *
11545 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11546 *
11547 *
11548 * PARAMETERS :
11549 * @request : request sent from framework
11550 *
11551 *
11552 * RETURN : success: NO_ERROR
11553 *              failure: non-zero error code
11554 *==========================================================================*/
11555int QCamera3HardwareInterface::translateToHalMetadata
11556 (const camera3_capture_request_t *request,
11557 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011558 uint32_t snapshotStreamId) {
11559 if (request == nullptr || hal_metadata == nullptr) {
11560 return BAD_VALUE;
11561 }
11562
11563 int64_t minFrameDuration = getMinFrameDuration(request);
11564
11565 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11566 minFrameDuration);
11567}
11568
11569int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11570 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11571 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11572
Thierry Strudel3d639192016-09-09 11:52:26 -070011573 int rc = 0;
11574 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011575 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011576
11577 /* Do not change the order of the following list unless you know what you are
11578 * doing.
11579 * The order is laid out in such a way that parameters in the front of the table
11580 * may be used to override the parameters later in the table. Examples are:
11581 * 1. META_MODE should precede AEC/AWB/AF MODE
11582 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11583     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11584 * 4. Any mode should precede it's corresponding settings
11585     * 4. Any mode should precede its corresponding settings
11586 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11587 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11589 rc = BAD_VALUE;
11590 }
11591 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11592 if (rc != NO_ERROR) {
11593 LOGE("extractSceneMode failed");
11594 }
11595 }
11596
11597 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11598 uint8_t fwk_aeMode =
11599 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11600 uint8_t aeMode;
11601 int32_t redeye;
11602
11603 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11604 aeMode = CAM_AE_MODE_OFF;
11605 } else {
11606 aeMode = CAM_AE_MODE_ON;
11607 }
11608 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11609 redeye = 1;
11610 } else {
11611 redeye = 0;
11612 }
11613
11614 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11615 fwk_aeMode);
11616 if (NAME_NOT_FOUND != val) {
11617 int32_t flashMode = (int32_t)val;
11618 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11619 }
11620
11621 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11623 rc = BAD_VALUE;
11624 }
11625 }
11626
11627 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11628 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11629 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11630 fwk_whiteLevel);
11631 if (NAME_NOT_FOUND != val) {
11632 uint8_t whiteLevel = (uint8_t)val;
11633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11634 rc = BAD_VALUE;
11635 }
11636 }
11637 }
11638
11639 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11640 uint8_t fwk_cacMode =
11641 frame_settings.find(
11642 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11643 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11644 fwk_cacMode);
11645 if (NAME_NOT_FOUND != val) {
11646 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11647 bool entryAvailable = FALSE;
11648 // Check whether Frameworks set CAC mode is supported in device or not
11649 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11650 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11651 entryAvailable = TRUE;
11652 break;
11653 }
11654 }
11655 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11656            // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
11657 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11658 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11659 if (entryAvailable == FALSE) {
11660 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11661 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11662 } else {
11663 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11664                    // High is not supported and so set FAST, as the spec says the underlying
11665 // device implementation can be the same for both modes.
11666 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11667 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11668 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11669 // in order to avoid the fps drop due to high quality
11670 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11671 } else {
11672 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11673 }
11674 }
11675 }
11676 LOGD("Final cacMode is %d", cacMode);
11677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11678 rc = BAD_VALUE;
11679 }
11680 } else {
11681 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11682 }
11683 }
11684
Thierry Strudel2896d122017-02-23 19:18:03 -080011685 char af_value[PROPERTY_VALUE_MAX];
11686 property_get("persist.camera.af.infinity", af_value, "0");
11687
Jason Lee84ae9972017-02-24 13:24:24 -080011688 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011689 if (atoi(af_value) == 0) {
11690 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011691 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011692 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11693 fwk_focusMode);
11694 if (NAME_NOT_FOUND != val) {
11695 uint8_t focusMode = (uint8_t)val;
11696 LOGD("set focus mode %d", focusMode);
11697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11698 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11699 rc = BAD_VALUE;
11700 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011701 }
11702 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011703 } else {
11704 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11705 LOGE("Focus forced to infinity %d", focusMode);
11706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11707 rc = BAD_VALUE;
11708 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011709 }
11710
Jason Lee84ae9972017-02-24 13:24:24 -080011711 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11712 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011713 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11715 focalDistance)) {
11716 rc = BAD_VALUE;
11717 }
11718 }
11719
11720 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11721 uint8_t fwk_antibandingMode =
11722 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11723 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11724 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11725 if (NAME_NOT_FOUND != val) {
11726 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011727 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11728 if (m60HzZone) {
11729 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11730 } else {
11731 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11732 }
11733 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11735 hal_antibandingMode)) {
11736 rc = BAD_VALUE;
11737 }
11738 }
11739 }
11740
11741 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11742 int32_t expCompensation = frame_settings.find(
11743 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11744 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11745 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11746 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11747 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011748 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11750 expCompensation)) {
11751 rc = BAD_VALUE;
11752 }
11753 }
11754
11755 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11756 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11758 rc = BAD_VALUE;
11759 }
11760 }
11761 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11762 rc = setHalFpsRange(frame_settings, hal_metadata);
11763 if (rc != NO_ERROR) {
11764 LOGE("setHalFpsRange failed");
11765 }
11766 }
11767
11768 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11769 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11771 rc = BAD_VALUE;
11772 }
11773 }
11774
11775 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11776 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11777 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11778 fwk_effectMode);
11779 if (NAME_NOT_FOUND != val) {
11780 uint8_t effectMode = (uint8_t)val;
11781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11782 rc = BAD_VALUE;
11783 }
11784 }
11785 }
11786
11787 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11788 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11790 colorCorrectMode)) {
11791 rc = BAD_VALUE;
11792 }
11793 }
11794
11795 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11796 cam_color_correct_gains_t colorCorrectGains;
11797 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11798 colorCorrectGains.gains[i] =
11799 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11800 }
11801 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11802 colorCorrectGains)) {
11803 rc = BAD_VALUE;
11804 }
11805 }
11806
11807 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11808 cam_color_correct_matrix_t colorCorrectTransform;
11809 cam_rational_type_t transform_elem;
11810 size_t num = 0;
11811 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11812 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11813 transform_elem.numerator =
11814 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11815 transform_elem.denominator =
11816 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11817 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11818 num++;
11819 }
11820 }
11821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11822 colorCorrectTransform)) {
11823 rc = BAD_VALUE;
11824 }
11825 }
11826
11827 cam_trigger_t aecTrigger;
11828 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11829 aecTrigger.trigger_id = -1;
11830 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11831 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11832 aecTrigger.trigger =
11833 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11834 aecTrigger.trigger_id =
11835 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11837 aecTrigger)) {
11838 rc = BAD_VALUE;
11839 }
11840 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11841 aecTrigger.trigger, aecTrigger.trigger_id);
11842 }
11843
11844 /*af_trigger must come with a trigger id*/
11845 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11846 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11847 cam_trigger_t af_trigger;
11848 af_trigger.trigger =
11849 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11850 af_trigger.trigger_id =
11851 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11853 rc = BAD_VALUE;
11854 }
11855 LOGD("AfTrigger: %d AfTriggerID: %d",
11856 af_trigger.trigger, af_trigger.trigger_id);
11857 }
11858
11859 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11860 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11862 rc = BAD_VALUE;
11863 }
11864 }
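// Edge mode OFF forces zero sharpness; otherwise start from the capability default and honor an in-range QCAMERA3_SHARPNESS_STRENGTH override.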
11865 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11866 cam_edge_application_t edge_application;
11867 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
11868
11869 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11870 edge_application.sharpness = 0;
11871 } else {
11872 edge_application.sharpness =
11873 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11874 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11875 int32_t sharpness =
11876 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11877 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11878 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11879 LOGD("Setting edge mode sharpness %d", sharpness);
11880 edge_application.sharpness = sharpness;
11881 }
11882 }
11883 }
11884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11885 rc = BAD_VALUE;
11886 }
11887 }
11888
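// android.flash.mode is honored only when AE mode does not already control the flash (AE mode OFF or ON).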
11889 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11890 int32_t respectFlashMode = 1;
11891 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11892 uint8_t fwk_aeMode =
11893 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11894 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11895 respectFlashMode = 0;
11896 LOGH("AE Mode controls flash, ignore android.flash.mode");
11897 }
11898 }
11899 if (respectFlashMode) {
11900 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11901 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11902 LOGH("flash mode after mapping %d", val);
11903 // To check: CAM_INTF_META_FLASH_MODE usage
11904 if (NAME_NOT_FOUND != val) {
11905 uint8_t flashMode = (uint8_t)val;
11906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11907 rc = BAD_VALUE;
11908 }
11909 }
11910 }
11911 }
11912
11913 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11914 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11916 rc = BAD_VALUE;
11917 }
11918 }
11919
11920 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11921 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11923 flashFiringTime)) {
11924 rc = BAD_VALUE;
11925 }
11926 }
11927
11928 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11929 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11931 hotPixelMode)) {
11932 rc = BAD_VALUE;
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11937 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11939 lensAperture)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11945 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11947 filterDensity)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11953 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11955 focalLength)) {
11956 rc = BAD_VALUE;
11957 }
11958 }
11959
11960 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11961 uint8_t optStabMode =
11962 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11964 optStabMode)) {
11965 rc = BAD_VALUE;
11966 }
11967 }
11968
11969 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11970 uint8_t videoStabMode =
11971 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11972 LOGD("videoStabMode from APP = %d", videoStabMode);
11973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11974 videoStabMode)) {
11975 rc = BAD_VALUE;
11976 }
11977 }
11978
11979
11980 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11981 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11983 noiseRedMode)) {
11984 rc = BAD_VALUE;
11985 }
11986 }
11987
11988 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11989 float reprocessEffectiveExposureFactor =
11990 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11992 reprocessEffectiveExposureFactor)) {
11993 rc = BAD_VALUE;
11994 }
11995 }
11996
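// The crop region arrives in active array coordinates; remap it to sensor output coordinates and remember it so the AE/AF regions below can be validated against it.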
11997 cam_crop_region_t scalerCropRegion;
11998 bool scalerCropSet = false;
11999 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12000 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12001 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12002 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12003 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12004
12005 // Map coordinate system from active array to sensor output.
12006 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12007 scalerCropRegion.width, scalerCropRegion.height);
12008
12009 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12010 scalerCropRegion)) {
12011 rc = BAD_VALUE;
12012 }
12013 scalerCropSet = true;
12014 }
12015
12016 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12017 int64_t sensorExpTime =
12018 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12019 LOGD("setting sensorExpTime %lld", sensorExpTime);
12020 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12021 sensorExpTime)) {
12022 rc = BAD_VALUE;
12023 }
12024 }
12025
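// Clamp the requested frame duration to [minFrameDuration, max_frame_duration] supported by the sensor.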
12026 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12027 int64_t sensorFrameDuration =
12028 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
12029 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12030 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12031 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12032 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12033 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12034 sensorFrameDuration)) {
12035 rc = BAD_VALUE;
12036 }
12037 }
12038
12039 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12040 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12041 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12042 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12043 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12044 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12045 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12047 sensorSensitivity)) {
12048 rc = BAD_VALUE;
12049 }
12050 }
12051
12052#ifndef USE_HAL_3_3
12053 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12054 int32_t ispSensitivity =
12055 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12056 if (ispSensitivity <
12057 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12058 ispSensitivity =
12059 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12060 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12061 }
12062 if (ispSensitivity >
12063 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12064 ispSensitivity =
12065 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12066 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12067 }
12068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12069 ispSensitivity)) {
12070 rc = BAD_VALUE;
12071 }
12072 }
12073#endif
12074
12075 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12076 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12078 rc = BAD_VALUE;
12079 }
12080 }
12081
12082 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12083 uint8_t fwk_facedetectMode =
12084 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12085
12086 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12087 fwk_facedetectMode);
12088
12089 if (NAME_NOT_FOUND != val) {
12090 uint8_t facedetectMode = (uint8_t)val;
12091 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12092 facedetectMode)) {
12093 rc = BAD_VALUE;
12094 }
12095 }
12096 }
12097
12098 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
12099 uint8_t histogramMode =
12100 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
12101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12102 histogramMode)) {
12103 rc = BAD_VALUE;
12104 }
12105 }
12106
12107 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12108 uint8_t sharpnessMapMode =
12109 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12111 sharpnessMapMode)) {
12112 rc = BAD_VALUE;
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12117 uint8_t tonemapMode =
12118 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12124 /*All tonemap channels will have the same number of points*/
12125 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12126 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12127 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12128 cam_rgb_tonemap_curves tonemapCurves;
12129 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12130 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12131 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12132 tonemapCurves.tonemap_points_cnt,
12133 CAM_MAX_TONEMAP_CURVE_SIZE);
12134 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12135 }
12136
12137 /* ch0 = G*/
12138 size_t point = 0;
12139 cam_tonemap_curve_t tonemapCurveGreen;
12140 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12141 for (size_t j = 0; j < 2; j++) {
12142 tonemapCurveGreen.tonemap_points[i][j] =
12143 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12144 point++;
12145 }
12146 }
12147 tonemapCurves.curves[0] = tonemapCurveGreen;
12148
12149 /* ch 1 = B */
12150 point = 0;
12151 cam_tonemap_curve_t tonemapCurveBlue;
12152 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12153 for (size_t j = 0; j < 2; j++) {
12154 tonemapCurveBlue.tonemap_points[i][j] =
12155 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12156 point++;
12157 }
12158 }
12159 tonemapCurves.curves[1] = tonemapCurveBlue;
12160
12161 /* ch 2 = R */
12162 point = 0;
12163 cam_tonemap_curve_t tonemapCurveRed;
12164 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12165 for (size_t j = 0; j < 2; j++) {
12166 tonemapCurveRed.tonemap_points[i][j] =
12167 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12168 point++;
12169 }
12170 }
12171 tonemapCurves.curves[2] = tonemapCurveRed;
12172
12173 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12174 tonemapCurves)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12180 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12182 captureIntent)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12188 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12190 blackLevelLock)) {
12191 rc = BAD_VALUE;
12192 }
12193 }
12194
12195 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12196 uint8_t lensShadingMapMode =
12197 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12199 lensShadingMapMode)) {
12200 rc = BAD_VALUE;
12201 }
12202 }
12203
12204 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12205 cam_area_t roi;
12206 bool reset = true;
12207 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
12208
12209 // Map coordinate system from active array to sensor output.
12210 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12211 roi.rect.height);
12212
12213 if (scalerCropSet) {
12214 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12215 }
12216 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12217 rc = BAD_VALUE;
12218 }
12219 }
12220
12221 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12222 cam_area_t roi;
12223 bool reset = true;
12224 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
12225
12226 // Map coordinate system from active array to sensor output.
12227 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12228 roi.rect.height);
12229
12230 if (scalerCropSet) {
12231 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12232 }
12233 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238 // CDS for non-HFR non-video mode
12239 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12240 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12241 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12242 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12243 LOGE("Invalid CDS mode %d!", *fwk_cds);
12244 } else {
12245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12246 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12247 rc = BAD_VALUE;
12248 }
12249 }
12250 }
12251
12252 // Video HDR
12253 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
12254 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
12255 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12256 }
12257 if (m_bVideoHdrEnabled)
12258 vhdr = CAM_VIDEO_HDR_MODE_ON;
12259
12260 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12261
12262 if(vhdr != curr_hdr_state)
12263 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12264
12265 rc = setVideoHdrMode(mParameters, vhdr);
12266 if (rc != NO_ERROR) {
12267 LOGE("setVideoHDR is failed");
12268 }
12269
12270 //IR
12271 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12272 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12273 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
12274 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12275 uint8_t isIRon = 0;
12276
12277 isIRon = (fwk_ir > 0) ? 1 : 0;
12278 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12279 LOGE("Invalid IR mode %d!", fwk_ir);
12280 } else {
12281 if(isIRon != curr_ir_state )
12282 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12283
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12285 CAM_INTF_META_IR_MODE, fwk_ir)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289 }
12290
12291 //Binning Correction Mode
12292 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12293 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12294 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12295 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12296 || (0 > fwk_binning_correction)) {
12297 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12298 } else {
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12300 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12301 rc = BAD_VALUE;
12302 }
12303 }
12304 }
12305
12306 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12307 float aec_speed;
12308 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12309 LOGD("AEC Speed :%f", aec_speed);
12310 if ( aec_speed < 0 ) {
12311 LOGE("Invalid AEC mode %f!", aec_speed);
12312 } else {
12313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12314 aec_speed)) {
12315 rc = BAD_VALUE;
12316 }
12317 }
12318 }
12319
12320 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12321 float awb_speed;
12322 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12323 LOGD("AWB Speed :%f", awb_speed);
12324 if ( awb_speed < 0 ) {
12325 LOGE("Invalid AWB mode %f!", awb_speed);
12326 } else {
12327 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12328 awb_speed)) {
12329 rc = BAD_VALUE;
12330 }
12331 }
12332 }
12333
12334 // TNR
12335 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12336 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12337 uint8_t b_TnrRequested = 0;
12338 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
12339 cam_denoise_param_t tnr;
12340 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12341 tnr.process_plates =
12342 (cam_denoise_process_type_t)frame_settings.find(
12343 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12344 b_TnrRequested = tnr.denoise_enable;
12345
12346 if(b_TnrRequested != curr_tnr_state)
12347 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12348
12349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353
12354 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
12355 int32_t* exposure_metering_mode =
12356 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
12357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12358 *exposure_metering_mode)) {
12359 rc = BAD_VALUE;
12360 }
12361 }
12362
12363 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12364 int32_t fwk_testPatternMode =
12365 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12366 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12367 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12368
12369 if (NAME_NOT_FOUND != testPatternMode) {
12370 cam_test_pattern_data_t testPatternData;
12371 memset(&testPatternData, 0, sizeof(testPatternData));
12372 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12373 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12374 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12375 int32_t *fwk_testPatternData =
12376 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12377 testPatternData.r = fwk_testPatternData[0];
12378 testPatternData.b = fwk_testPatternData[3];
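// Framework supplies the solid color as [R, Geven, Godd, B]; map the green samples to Gr/Gb based on the sensor's Bayer arrangement.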
12379 switch (gCamCapability[mCameraId]->color_arrangement) {
12380 case CAM_FILTER_ARRANGEMENT_RGGB:
12381 case CAM_FILTER_ARRANGEMENT_GRBG:
12382 testPatternData.gr = fwk_testPatternData[1];
12383 testPatternData.gb = fwk_testPatternData[2];
12384 break;
12385 case CAM_FILTER_ARRANGEMENT_GBRG:
12386 case CAM_FILTER_ARRANGEMENT_BGGR:
12387 testPatternData.gr = fwk_testPatternData[2];
12388 testPatternData.gb = fwk_testPatternData[1];
12389 break;
12390 default:
12391 LOGE("color arrangement %d is not supported",
12392 gCamCapability[mCameraId]->color_arrangement);
12393 break;
12394 }
12395 }
12396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12397 testPatternData)) {
12398 rc = BAD_VALUE;
12399 }
12400 } else {
12401 LOGE("Invalid framework sensor test pattern mode %d",
12402 fwk_testPatternMode);
12403 }
12404 }
12405
12406 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12407 size_t count = 0;
12408 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12409 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12410 gps_coords.data.d, gps_coords.count, count);
12411 if (gps_coords.count != count) {
12412 rc = BAD_VALUE;
12413 }
12414 }
12415
12416 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12417 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12418 size_t count = 0;
12419 const char *gps_methods_src = (const char *)
12420 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12421 memset(gps_methods, '\0', sizeof(gps_methods));
12422 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12423 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12424 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12425 if (GPS_PROCESSING_METHOD_SIZE != count) {
12426 rc = BAD_VALUE;
12427 }
12428 }
12429
12430 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12431 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12433 gps_timestamp)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437
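// JPEG orientation is forwarded both as EXIF orientation and as a rotation request bound to the snapshot stream.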
12438 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12439 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12440 cam_rotation_info_t rotation_info;
12441 if (orientation == 0) {
12442 rotation_info.rotation = ROTATE_0;
12443 } else if (orientation == 90) {
12444 rotation_info.rotation = ROTATE_90;
12445 } else if (orientation == 180) {
12446 rotation_info.rotation = ROTATE_180;
12447 } else if (orientation == 270) {
12448 rotation_info.rotation = ROTATE_270;
12449 }
12450 rotation_info.device_rotation = ROTATE_0;
12451 rotation_info.streamId = snapshotStreamId;
12452 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12454 rc = BAD_VALUE;
12455 }
12456 }
12457
12458 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12459 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12461 rc = BAD_VALUE;
12462 }
12463 }
12464
12465 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12466 uint32_t thumb_quality = (uint32_t)
12467 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12469 thumb_quality)) {
12470 rc = BAD_VALUE;
12471 }
12472 }
12473
12474 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12475 cam_dimension_t dim;
12476 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12477 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12478 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12479 rc = BAD_VALUE;
12480 }
12481 }
12482
12483 // Internal metadata
12484 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12485 size_t count = 0;
12486 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12487 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12488 privatedata.data.i32, privatedata.count, count);
12489 if (privatedata.count != count) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 // ISO/Exposure Priority
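// When both priority vendor tags are present, program manual ISO or exposure-time priority and force ZSL on; otherwise ZSL is explicitly disabled.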
12495 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12496 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12497 cam_priority_mode_t mode =
12498 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12499 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12500 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12501 use_iso_exp_pty.previewOnly = FALSE;
12502 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12503 use_iso_exp_pty.value = *ptr;
12504
12505 if(CAM_ISO_PRIORITY == mode) {
12506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12507 use_iso_exp_pty)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511 else {
12512 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12513 use_iso_exp_pty)) {
12514 rc = BAD_VALUE;
12515 }
12516 }
12517
12518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12519 rc = BAD_VALUE;
12520 }
12521 }
12522 } else {
12523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12524 rc = BAD_VALUE;
12525 }
12526 }
12527
12528 // Saturation
12529 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12530 int32_t* use_saturation =
12531 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12533 rc = BAD_VALUE;
12534 }
12535 }
12536
12537 // EV step
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12539 gCamCapability[mCameraId]->exp_compensation_step)) {
12540 rc = BAD_VALUE;
12541 }
12542
12543 // CDS info
12544 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12545 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12546 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12547
12548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12549 CAM_INTF_META_CDS_DATA, *cdsData)) {
12550 rc = BAD_VALUE;
12551 }
12552 }
12553
12554 // Hybrid AE
12555 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12556 uint8_t *hybrid_ae = (uint8_t *)
12557 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12558
12559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12560 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564
12565 // Histogram
12566 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12567 uint8_t histogramMode =
12568 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12570 histogramMode)) {
12571 rc = BAD_VALUE;
12572 }
12573 }
12574
12575 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12576 int32_t histogramBins =
12577 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12578 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12579 histogramBins)) {
12580 rc = BAD_VALUE;
12581 }
12582 }
12583
12584 return rc;
12585}
12586
12587/*===========================================================================
12588 * FUNCTION : captureResultCb
12589 *
12590 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12591 *
12592 * PARAMETERS :
12593 * @metadata : metadata super buffer from mm-camera-interface
12594 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12595 * @userdata: userdata
12596 *
12597 * RETURN : NONE
12598 *==========================================================================*/
12599void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12600 camera3_stream_buffer_t *buffer,
12601 uint32_t frame_number, bool isInputBuffer, void *userdata)
12602{
12603 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12604 if (hw == NULL) {
12605 LOGE("Invalid hw %p", hw);
12606 return;
12607 }
12608
12609 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12610 return;
12611}
12612
12613/*===========================================================================
12614 * FUNCTION : setBufferErrorStatus
12615 *
12616 * DESCRIPTION: Callback handler for channels to report any buffer errors
12617 *
12618 * PARAMETERS :
12619 * @ch : Channel on which buffer error is reported from
12620 * @frame_number : frame number on which buffer error is reported on
12621 * @buffer_status : buffer error status
12622 * @userdata: userdata
12623 *
12624 * RETURN : NONE
12625 *==========================================================================*/
12626void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12627 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12628{
12629 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12630 if (hw == NULL) {
12631 LOGE("Invalid hw %p", hw);
12632 return;
12633 }
12634
12635 hw->setBufferErrorStatus(ch, frame_number, err);
12636 return;
12637}
12638
12639void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12640 uint32_t frameNumber, camera3_buffer_status_t err)
12641{
12642 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12643 pthread_mutex_lock(&mMutex);
12644
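// Mark every pending buffer of this frame that belongs to the reporting channel as errored.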
12645 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12646 if (req.frame_number != frameNumber)
12647 continue;
12648 for (auto& k : req.mPendingBufferList) {
12649 if(k.stream->priv == ch) {
12650 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12651 }
12652 }
12653 }
12654
12655 pthread_mutex_unlock(&mMutex);
12656 return;
12657}
12658/*===========================================================================
12659 * FUNCTION : initialize
12660 *
12661 * DESCRIPTION: Pass framework callback pointers to HAL
12662 *
12663 * PARAMETERS :
12664 *
12665 *
12666 * RETURN : Success : 0
12667 * Failure: -ENODEV
12668 *==========================================================================*/
12669
12670int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12671 const camera3_callback_ops_t *callback_ops)
12672{
12673 LOGD("E");
12674 QCamera3HardwareInterface *hw =
12675 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12676 if (!hw) {
12677 LOGE("NULL camera device");
12678 return -ENODEV;
12679 }
12680
12681 int rc = hw->initialize(callback_ops);
12682 LOGD("X");
12683 return rc;
12684}
12685
12686/*===========================================================================
12687 * FUNCTION : configure_streams
12688 *
12689 * DESCRIPTION: Static entry point that validates the device handle and forwards the stream list to configureStreams()
12690 *
12691 * PARAMETERS :
12692 *
12693 *
12694 * RETURN : Success: 0
12695 * Failure: -EINVAL (if stream configuration is invalid)
12696 * -ENODEV (fatal error)
12697 *==========================================================================*/
12698
12699int QCamera3HardwareInterface::configure_streams(
12700 const struct camera3_device *device,
12701 camera3_stream_configuration_t *stream_list)
12702{
12703 LOGD("E");
12704 QCamera3HardwareInterface *hw =
12705 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12706 if (!hw) {
12707 LOGE("NULL camera device");
12708 return -ENODEV;
12709 }
12710 int rc = hw->configureStreams(stream_list);
12711 LOGD("X");
12712 return rc;
12713}
12714
12715/*===========================================================================
12716 * FUNCTION : construct_default_request_settings
12717 *
12718 * DESCRIPTION: Configure a settings buffer to meet the required use case
12719 *
12720 * PARAMETERS :
12721 *
12722 *
12723 * RETURN : Success: Return valid metadata
12724 * Failure: Return NULL
12725 *==========================================================================*/
12726const camera_metadata_t* QCamera3HardwareInterface::
12727 construct_default_request_settings(const struct camera3_device *device,
12728 int type)
12729{
12730
12731 LOGD("E");
12732 camera_metadata_t* fwk_metadata = NULL;
12733 QCamera3HardwareInterface *hw =
12734 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12735 if (!hw) {
12736 LOGE("NULL camera device");
12737 return NULL;
12738 }
12739
12740 fwk_metadata = hw->translateCapabilityToMetadata(type);
12741
12742 LOGD("X");
12743 return fwk_metadata;
12744}
12745
12746/*===========================================================================
12747 * FUNCTION : process_capture_request
12748 *
12749 * DESCRIPTION: Static entry point that forwards a capture request to orchestrateRequest()
12750 *
12751 * PARAMETERS :
12752 *
12753 *
12754 * RETURN :
12755 *==========================================================================*/
12756int QCamera3HardwareInterface::process_capture_request(
12757 const struct camera3_device *device,
12758 camera3_capture_request_t *request)
12759{
12760 LOGD("E");
12761 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
12762 QCamera3HardwareInterface *hw =
12763 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12764 if (!hw) {
12765 LOGE("NULL camera device");
12766 return -EINVAL;
12767 }
12768
12769 int rc = hw->orchestrateRequest(request);
12770 LOGD("X");
12771 return rc;
12772}
12773
12774/*===========================================================================
12775 * FUNCTION : dump
12776 *
12777 * DESCRIPTION: Dump HAL state for the given camera device to the supplied file descriptor
12778 *
12779 * PARAMETERS :
12780 *
12781 *
12782 * RETURN :
12783 *==========================================================================*/
12784
12785void QCamera3HardwareInterface::dump(
12786 const struct camera3_device *device, int fd)
12787{
12788 /* Log level property is read when "adb shell dumpsys media.camera" is
12789 called so that the log level can be controlled without restarting
12790 the media server */
12791 getLogLevel();
12792
12793 LOGD("E");
12794 QCamera3HardwareInterface *hw =
12795 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12796 if (!hw) {
12797 LOGE("NULL camera device");
12798 return;
12799 }
12800
12801 hw->dump(fd);
12802 LOGD("X");
12803 return;
12804}
12805
12806/*===========================================================================
12807 * FUNCTION : flush
12808 *
12809 * DESCRIPTION: Flush all in-flight requests on the camera device
12810 *
12811 * PARAMETERS :
12812 *
12813 *
12814 * RETURN :
12815 *==========================================================================*/
12816
12817int QCamera3HardwareInterface::flush(
12818 const struct camera3_device *device)
12819{
12820 int rc;
12821 LOGD("E");
12822 QCamera3HardwareInterface *hw =
12823 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12824 if (!hw) {
12825 LOGE("NULL camera device");
12826 return -EINVAL;
12827 }
12828
12829 pthread_mutex_lock(&hw->mMutex);
12830 // Validate current state
12831 switch (hw->mState) {
12832 case STARTED:
12833 /* valid state */
12834 break;
12835
12836 case ERROR:
12837 pthread_mutex_unlock(&hw->mMutex);
12838 hw->handleCameraDeviceError();
12839 return -ENODEV;
12840
12841 default:
12842 LOGI("Flush returned during state %d", hw->mState);
12843 pthread_mutex_unlock(&hw->mMutex);
12844 return 0;
12845 }
12846 pthread_mutex_unlock(&hw->mMutex);
12847
12848 rc = hw->flush(true /* restart channels */ );
12849 LOGD("X");
12850 return rc;
12851}
12852
12853/*===========================================================================
12854 * FUNCTION : close_camera_device
12855 *
12856 * DESCRIPTION: Close the camera device and free the hardware interface object
12857 *
12858 * PARAMETERS :
12859 *
12860 *
12861 * RETURN :
12862 *==========================================================================*/
12863int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12864{
12865 int ret = NO_ERROR;
12866 QCamera3HardwareInterface *hw =
12867 reinterpret_cast<QCamera3HardwareInterface *>(
12868 reinterpret_cast<camera3_device_t *>(device)->priv);
12869 if (!hw) {
12870 LOGE("NULL camera device");
12871 return BAD_VALUE;
12872 }
12873
12874 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12875 delete hw;
12876 LOGI("[KPI Perf]: X");
12877 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
12878 return ret;
12879}
12880
12881/*===========================================================================
12882 * FUNCTION : getWaveletDenoiseProcessPlate
12883 *
12884 * DESCRIPTION: query wavelet denoise process plate
12885 *
12886 * PARAMETERS : None
12887 *
12888 * RETURN : WNR prcocess plate value
12889 *==========================================================================*/
12890cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12891{
12892 char prop[PROPERTY_VALUE_MAX];
12893 memset(prop, 0, sizeof(prop));
12894 property_get("persist.denoise.process.plates", prop, "0");
12895 int processPlate = atoi(prop);
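// Property values: 0 = YCbCr plane (default), 1 = CbCr only, 2 = streamlined YCbCr, 3 = streamlined CbCr; anything else falls back to streamlined YCbCr.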
12896 switch(processPlate) {
12897 case 0:
12898 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12899 case 1:
12900 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12901 case 2:
12902 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12903 case 3:
12904 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12905 default:
12906 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12907 }
12908}
12909
12910
12911/*===========================================================================
12912 * FUNCTION : getTemporalDenoiseProcessPlate
12913 *
12914 * DESCRIPTION: query temporal denoise process plate
12915 *
12916 * PARAMETERS : None
12917 *
12918 * RETURN : TNR process plate value
12919 *==========================================================================*/
12920cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12921{
12922 char prop[PROPERTY_VALUE_MAX];
12923 memset(prop, 0, sizeof(prop));
12924 property_get("persist.tnr.process.plates", prop, "0");
12925 int processPlate = atoi(prop);
12926 switch(processPlate) {
12927 case 0:
12928 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12929 case 1:
12930 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12931 case 2:
12932 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12933 case 3:
12934 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12935 default:
12936 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12937 }
12938}
12939
12940
12941/*===========================================================================
12942 * FUNCTION : extractSceneMode
12943 *
12944 * DESCRIPTION: Extract scene mode from frameworks set metadata
12945 *
12946 * PARAMETERS :
12947 * @frame_settings: CameraMetadata reference
12948 * @metaMode: ANDROID_CONTROL_MODE
12949 * @hal_metadata: hal metadata structure
12950 *
12951 * RETURN : int32_t type of status, NO_ERROR on success
12952 *==========================================================================*/
12953int32_t QCamera3HardwareInterface::extractSceneMode(
12954 const CameraMetadata &frame_settings, uint8_t metaMode,
12955 metadata_buffer_t *hal_metadata)
12956{
12957 int32_t rc = NO_ERROR;
12958 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12959
12960 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12961 LOGD("Ignoring control mode OFF_KEEP_STATE");
12962 return NO_ERROR;
12963 }
12964
12965 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12966 camera_metadata_ro_entry entry =
12967 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12968 if (0 == entry.count)
12969 return rc;
12970
12971 uint8_t fwk_sceneMode = entry.data.u8[0];
12972
12973 int val = lookupHalName(SCENE_MODES_MAP,
12974 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12975 fwk_sceneMode);
12976 if (NAME_NOT_FOUND != val) {
12977 sceneMode = (uint8_t)val;
12978 LOGD("sceneMode: %d", sceneMode);
12979 }
12980 }
12981
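// Toggle sensor HDR whenever the HDR scene mode changes; multi-frame HDR bracketing and bestshot mode below apply only when sensor HDR stays disabled.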
12982 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12983 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12984 }
12985
12986 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12987 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
12988 cam_hdr_param_t hdr_params;
12989 hdr_params.hdr_enable = 1;
12990 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12991 hdr_params.hdr_need_1x = false;
12992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12993 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12994 rc = BAD_VALUE;
12995 }
12996 }
12997
12998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12999 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13000 rc = BAD_VALUE;
13001 }
13002 }
13003
13004 if (mForceHdrSnapshot) {
13005 cam_hdr_param_t hdr_params;
13006 hdr_params.hdr_enable = 1;
13007 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13008 hdr_params.hdr_need_1x = false;
13009 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13010 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13011 rc = BAD_VALUE;
13012 }
13013 }
13014
13015 return rc;
13016}
13017
13018/*===========================================================================
13019 * FUNCTION : setVideoHdrMode
13020 *
13021 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13022 *
13023 * PARAMETERS :
13024 * @hal_metadata: hal metadata structure
13025 * @vhdr: video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13026 *
13027 * RETURN : int32_t type of status, NO_ERROR on success
13028 *==========================================================================*/
13029int32_t QCamera3HardwareInterface::setVideoHdrMode(
13030 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13031{
13032 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13033 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13034 }
13035
13036 LOGE("Invalid Video HDR mode %d!", vhdr);
13037 return BAD_VALUE;
13038}
13039
13040/*===========================================================================
13041 * FUNCTION : setSensorHDR
13042 *
13043 * DESCRIPTION: Enable/disable sensor HDR.
13044 *
13045 * PARAMETERS :
13046 * @hal_metadata: hal metadata structure
13047 * @enable: boolean whether to enable/disable sensor HDR
13048 *
13049 * RETURN : int32_t type of status, NO_ERROR on success
13050 *==========================================================================*/
13051int32_t QCamera3HardwareInterface::setSensorHDR(
13052 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13053{
13054 int32_t rc = NO_ERROR;
13055 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13056
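// persist.camera.sensor.hdr selects the cam_sensor_hdr_type_t to request; each mode is applied below only if the ISP advertises the matching feature mask.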
13057 if (enable) {
13058 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13059 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13060 #ifdef _LE_CAMERA_
13061 //Default to staggered HDR for IOT
13062 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13063 #else
13064 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13065 #endif
13066 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13067 }
13068
13069 bool isSupported = false;
13070 switch (sensor_hdr) {
13071 case CAM_SENSOR_HDR_IN_SENSOR:
13072 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13073 CAM_QCOM_FEATURE_SENSOR_HDR) {
13074 isSupported = true;
13075 LOGD("Setting HDR mode In Sensor");
13076 }
13077 break;
13078 case CAM_SENSOR_HDR_ZIGZAG:
13079 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13080 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13081 isSupported = true;
13082 LOGD("Setting HDR mode Zigzag");
13083 }
13084 break;
13085 case CAM_SENSOR_HDR_STAGGERED:
13086 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13087 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13088 isSupported = true;
13089 LOGD("Setting HDR mode Staggered");
13090 }
13091 break;
13092 case CAM_SENSOR_HDR_OFF:
13093 isSupported = true;
13094 LOGD("Turning off sensor HDR");
13095 break;
13096 default:
13097 LOGE("HDR mode %d not supported", sensor_hdr);
13098 rc = BAD_VALUE;
13099 break;
13100 }
13101
13102 if(isSupported) {
13103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13104 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13105 rc = BAD_VALUE;
13106 } else {
13107 if(!isVideoHdrEnable)
13108 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
13109 }
13110 }
13111 return rc;
13112}
13113
13114/*===========================================================================
13115 * FUNCTION : needRotationReprocess
13116 *
13117 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13118 *
13119 * PARAMETERS : none
13120 *
13121 * RETURN : true: needed
13122 * false: no need
13123 *==========================================================================*/
13124bool QCamera3HardwareInterface::needRotationReprocess()
13125{
13126 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13127 // current rotation is not zero, and pp has the capability to process rotation
13128 LOGH("need do reprocess for rotation");
13129 return true;
13130 }
13131
13132 return false;
13133}
13134
13135/*===========================================================================
13136 * FUNCTION : needReprocess
13137 *
13138 * DESCRIPTION: if reprocess is needed
13139 *
13140 * PARAMETERS : none
13141 *
13142 * RETURN : true: needed
13143 * false: no need
13144 *==========================================================================*/
13145bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13146{
13147 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13148 // TODO: add for ZSL HDR later
13149 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13150 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13151 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13152 return true;
13153 } else {
13154 LOGH("already post processed frame");
13155 return false;
13156 }
13157 }
13158 return needRotationReprocess();
13159}
13160
13161/*===========================================================================
13162 * FUNCTION : needJpegExifRotation
13163 *
13164 * DESCRIPTION: if rotation from jpeg is needed
13165 *
13166 * PARAMETERS : none
13167 *
13168 * RETURN : true: needed
13169 * false: no need
13170 *==========================================================================*/
13171bool QCamera3HardwareInterface::needJpegExifRotation()
13172{
13173 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
13174 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13175 LOGD("Need use Jpeg EXIF Rotation");
13176 return true;
13177 }
13178 return false;
13179}
13180
13181/*===========================================================================
13182 * FUNCTION : addOfflineReprocChannel
13183 *
13184 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13185 * coming from input channel
13186 *
13187 * PARAMETERS :
13188 * @config : reprocess configuration
13189 * @inputChHandle : pointer to the input (source) channel
13190 *
13191 *
13192 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13193 *==========================================================================*/
13194QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13195 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13196{
13197 int32_t rc = NO_ERROR;
13198 QCamera3ReprocessChannel *pChannel = NULL;
13199
13200 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
13201 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13202 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
13203 if (NULL == pChannel) {
13204 LOGE("no mem for reprocess channel");
13205 return NULL;
13206 }
13207
13208 rc = pChannel->initialize(IS_TYPE_NONE);
13209 if (rc != NO_ERROR) {
13210 LOGE("init reprocess channel failed, ret = %d", rc);
13211 delete pChannel;
13212 return NULL;
13213 }
13214
13215 // pp feature config
13216 cam_pp_feature_config_t pp_config;
13217 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13218
13219 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13220 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13221 & CAM_QCOM_FEATURE_DSDN) {
13222 //Use CPP CDS in case h/w supports it.
13223 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13224 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13225 }
13226 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13227 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13228 }
13229
13230 if (config.hdr_param.hdr_enable) {
13231 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13232 pp_config.hdr_param = config.hdr_param;
13233 }
13234
13235 if (mForceHdrSnapshot) {
13236 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13237 pp_config.hdr_param.hdr_enable = 1;
13238 pp_config.hdr_param.hdr_need_1x = 0;
13239 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13240 }
13241
13242 rc = pChannel->addReprocStreamsFromSource(pp_config,
13243 config,
13244 IS_TYPE_NONE,
13245 mMetadataChannel);
13246
13247 if (rc != NO_ERROR) {
13248 delete pChannel;
13249 return NULL;
13250 }
13251 return pChannel;
13252}
13253
13254/*===========================================================================
13255 * FUNCTION : getMobicatMask
13256 *
13257 * DESCRIPTION: returns mobicat mask
13258 *
13259 * PARAMETERS : none
13260 *
13261 * RETURN : mobicat mask
13262 *
13263 *==========================================================================*/
13264uint8_t QCamera3HardwareInterface::getMobicatMask()
13265{
13266 return m_MobicatMask;
13267}
13268
13269/*===========================================================================
13270 * FUNCTION : setMobicat
13271 *
13272 * DESCRIPTION: set Mobicat on/off.
13273 *
13274 * PARAMETERS :
13275 * @params : none
13276 *
13277 * RETURN : int32_t type of status
13278 * NO_ERROR -- success
13279 * none-zero failure code
13280 *==========================================================================*/
13281int32_t QCamera3HardwareInterface::setMobicat()
13282{
13283 char value [PROPERTY_VALUE_MAX];
13284 property_get("persist.camera.mobicat", value, "0");
13285 int32_t ret = NO_ERROR;
13286 uint8_t enableMobi = (uint8_t)atoi(value);
13287
13288 if (enableMobi) {
13289 tune_cmd_t tune_cmd;
13290 tune_cmd.type = SET_RELOAD_CHROMATIX;
13291 tune_cmd.module = MODULE_ALL;
13292 tune_cmd.value = TRUE;
13293 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13294 CAM_INTF_PARM_SET_VFE_COMMAND,
13295 tune_cmd);
13296
13297 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13298 CAM_INTF_PARM_SET_PP_COMMAND,
13299 tune_cmd);
13300 }
13301 m_MobicatMask = enableMobi;
13302
13303 return ret;
13304}
13305
13306/*===========================================================================
13307* FUNCTION : getLogLevel
13308*
13309* DESCRIPTION: Reads the log level property into a variable
13310*
13311* PARAMETERS :
13312* None
13313*
13314* RETURN :
13315* None
13316*==========================================================================*/
13317void QCamera3HardwareInterface::getLogLevel()
13318{
13319 char prop[PROPERTY_VALUE_MAX];
13320 uint32_t globalLogLevel = 0;
13321
13322 property_get("persist.camera.hal.debug", prop, "0");
13323 int val = atoi(prop);
13324 if (0 <= val) {
13325 gCamHal3LogLevel = (uint32_t)val;
13326 }
13327
13328 property_get("persist.camera.kpi.debug", prop, "0");
13329 gKpiDebugLevel = atoi(prop);
13330
13331 property_get("persist.camera.global.debug", prop, "0");
13332 val = atoi(prop);
13333 if (0 <= val) {
13334 globalLogLevel = (uint32_t)val;
13335 }
13336
13337 /* Highest log level among hal.logs and global.logs is selected */
13338 if (gCamHal3LogLevel < globalLogLevel)
13339 gCamHal3LogLevel = globalLogLevel;
13340
13341 return;
13342}
13343
13344/*===========================================================================
13345 * FUNCTION : validateStreamRotations
13346 *
13347 * DESCRIPTION: Check if the rotations requested are supported
13348 *
13349 * PARAMETERS :
13350 * @stream_list : streams to be configured
13351 *
13352 * RETURN : NO_ERROR on success
13353 * -EINVAL on failure
13354 *
13355 *==========================================================================*/
13356int QCamera3HardwareInterface::validateStreamRotations(
13357 camera3_stream_configuration_t *streamList)
13358{
13359 int rc = NO_ERROR;
13360
13361 /*
13362 * Loop through all streams requested in configuration
13363 * Check if unsupported rotations have been requested on any of them
13364 */
13365 for (size_t j = 0; j < streamList->num_streams; j++){
13366 camera3_stream_t *newStream = streamList->streams[j];
13367
13368 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13369 bool isImplDef = (newStream->format ==
13370 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13371 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13372 isImplDef);
13373
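        // Rotation is only supported on implementation-defined output streams
        // that are not ZSL (bidirectional) streams; reject everything else.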
13374 if (isRotated && (!isImplDef || isZsl)) {
13375 LOGE("Error: Unsupported rotation of %d requested for stream "
13376 "type:%d and stream format:%d",
13377 newStream->rotation, newStream->stream_type,
13378 newStream->format);
13379 rc = -EINVAL;
13380 break;
13381 }
13382 }
13383
13384 return rc;
13385}
13386
13387/*===========================================================================
13388* FUNCTION : getFlashInfo
13389*
13390* DESCRIPTION: Retrieve information about whether the device has a flash.
13391*
13392* PARAMETERS :
13393* @cameraId : Camera id to query
13394* @hasFlash : Boolean indicating whether there is a flash device
13395* associated with given camera
13396* @flashNode : If a flash device exists, this will be its device node.
13397*
13398* RETURN :
13399* None
13400*==========================================================================*/
13401void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13402 bool& hasFlash,
13403 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13404{
13405 cam_capability_t* camCapability = gCamCapability[cameraId];
13406 if (NULL == camCapability) {
13407 hasFlash = false;
13408 flashNode[0] = '\0';
13409 } else {
13410 hasFlash = camCapability->flash_available;
13411 strlcpy(flashNode,
13412 (char*)camCapability->flash_dev_name,
13413 QCAMERA_MAX_FILEPATH_LENGTH);
13414 }
13415}
13416
13417/*===========================================================================
13418* FUNCTION : getEepromVersionInfo
13419*
13420* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13421*
13422* PARAMETERS : None
13423*
13424* RETURN : string describing EEPROM version
13425* "\0" if no such info available
13426*==========================================================================*/
13427const char *QCamera3HardwareInterface::getEepromVersionInfo()
13428{
13429 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13430}
13431
13432/*===========================================================================
13433* FUNCTION : getLdafCalib
13434*
13435* DESCRIPTION: Retrieve Laser AF calibration data
13436*
13437* PARAMETERS : None
13438*
13439* RETURN : Two uint32_t describing laser AF calibration data
13440* NULL if none is available.
13441*==========================================================================*/
13442const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13443{
13444 if (mLdafCalibExist) {
13445 return &mLdafCalib[0];
13446 } else {
13447 return NULL;
13448 }
13449}
13450
13451/*===========================================================================
13452 * FUNCTION : dynamicUpdateMetaStreamInfo
13453 *
13454 * DESCRIPTION: This function:
13455 * (1) stops all the channels
13456 * (2) returns error on pending requests and buffers
13457 * (3) sends metastream_info in setparams
13458 * (4) starts all channels
13459 * This is useful when sensor has to be restarted to apply any
13460 * This is useful when the sensor has to be restarted to apply
13461 * settings, such as a frame rate that requires a different sensor mode
13462 * PARAMETERS : None
13463 *
13464 * RETURN : NO_ERROR on success
13465 * Error codes on failure
13466 *
13467 *==========================================================================*/
13468int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13469{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013470 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013471 int rc = NO_ERROR;
13472
13473 LOGD("E");
13474
13475 rc = stopAllChannels();
13476 if (rc < 0) {
13477 LOGE("stopAllChannels failed");
13478 return rc;
13479 }
13480
13481 rc = notifyErrorForPendingRequests();
13482 if (rc < 0) {
13483 LOGE("notifyErrorForPendingRequests failed");
13484 return rc;
13485 }
13486
13487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13488 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx, "
13489 "Format:%d",
13490 mStreamConfigInfo.type[i],
13491 mStreamConfigInfo.stream_sizes[i].width,
13492 mStreamConfigInfo.stream_sizes[i].height,
13493 mStreamConfigInfo.postprocess_mask[i],
13494 mStreamConfigInfo.format[i]);
13495 }
13496
13497 /* Send meta stream info once again so that ISP can start */
13498 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13499 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13500 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13501 mParameters);
13502 if (rc < 0) {
13503 LOGE("Set meta stream info failed. Sensor mode will not change");
13504 }
13505
13506 rc = startAllChannels();
13507 if (rc < 0) {
13508 LOGE("startAllChannels failed");
13509 return rc;
13510 }
13511
13512 LOGD("X");
13513 return rc;
13514}
13515
13516/*===========================================================================
13517 * FUNCTION : stopAllChannels
13518 *
13519 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13520 *
13521 * PARAMETERS : None
13522 *
13523 * RETURN : NO_ERROR on success
13524 * Error codes on failure
13525 *
13526 *==========================================================================*/
13527int32_t QCamera3HardwareInterface::stopAllChannels()
13528{
13529 int32_t rc = NO_ERROR;
13530
13531 LOGD("Stopping all channels");
13532 // Stop the Streams/Channels
13533 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13534 it != mStreamInfo.end(); it++) {
13535 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13536 if (channel) {
13537 channel->stop();
13538 }
13539 (*it)->status = INVALID;
13540 }
13541
13542 if (mSupportChannel) {
13543 mSupportChannel->stop();
13544 }
13545 if (mAnalysisChannel) {
13546 mAnalysisChannel->stop();
13547 }
13548 if (mRawDumpChannel) {
13549 mRawDumpChannel->stop();
13550 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013551 if (mHdrPlusRawSrcChannel) {
13552 mHdrPlusRawSrcChannel->stop();
13553 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013554 if (mMetadataChannel) {
13555 /* If mStreamInfo is not empty, there is a metadata stream */
13556 mMetadataChannel->stop();
13557 }
13558
13559 LOGD("All channels stopped");
13560 return rc;
13561}
13562
13563/*===========================================================================
13564 * FUNCTION : startAllChannels
13565 *
13566 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13567 *
13568 * PARAMETERS : None
13569 *
13570 * RETURN : NO_ERROR on success
13571 * Error codes on failure
13572 *
13573 *==========================================================================*/
13574int32_t QCamera3HardwareInterface::startAllChannels()
13575{
13576 int32_t rc = NO_ERROR;
13577
13578 LOGD("Start all channels ");
13579 // Start the Streams/Channels
13580 if (mMetadataChannel) {
13581 /* If mStreamInfo is not empty, there is a metadata stream */
13582 rc = mMetadataChannel->start();
13583 if (rc < 0) {
13584 LOGE("META channel start failed");
13585 return rc;
13586 }
13587 }
13588 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13589 it != mStreamInfo.end(); it++) {
13590 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13591 if (channel) {
13592 rc = channel->start();
13593 if (rc < 0) {
13594 LOGE("channel start failed");
13595 return rc;
13596 }
13597 }
13598 }
13599 if (mAnalysisChannel) {
13600 mAnalysisChannel->start();
13601 }
13602 if (mSupportChannel) {
13603 rc = mSupportChannel->start();
13604 if (rc < 0) {
13605 LOGE("Support channel start failed");
13606 return rc;
13607 }
13608 }
13609 if (mRawDumpChannel) {
13610 rc = mRawDumpChannel->start();
13611 if (rc < 0) {
13612 LOGE("RAW dump channel start failed");
13613 return rc;
13614 }
13615 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013616 if (mHdrPlusRawSrcChannel) {
13617 rc = mHdrPlusRawSrcChannel->start();
13618 if (rc < 0) {
13619 LOGE("HDR+ RAW channel start failed");
13620 return rc;
13621 }
13622 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013623
13624 LOGD("All channels started");
13625 return rc;
13626}
13627
13628/*===========================================================================
13629 * FUNCTION : notifyErrorForPendingRequests
13630 *
13631 * DESCRIPTION: This function sends error for all the pending requests/buffers
13632 *
13633 * PARAMETERS : None
13634 *
13635 * RETURN : Error codes
13636 * NO_ERROR on success
13637 *
13638 *==========================================================================*/
13639int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13640{
13641 int32_t rc = NO_ERROR;
13642 unsigned int frameNum = 0;
13643 camera3_capture_result_t result;
13644 camera3_stream_buffer_t *pStream_Buf = NULL;
13645
13646 memset(&result, 0, sizeof(camera3_capture_result_t));
13647
13648 if (mPendingRequestsList.size() > 0) {
13649 pendingRequestIterator i = mPendingRequestsList.begin();
13650 frameNum = i->frame_number;
13651 } else {
13652 /* There might still be pending buffers even though there are
13653 no pending requests. Setting the frameNum to MAX so that
13654 all the buffers with smaller frame numbers are returned */
13655 frameNum = UINT_MAX;
13656 }
13657
13658 LOGH("Oldest frame num on mPendingRequestsList = %u",
13659 frameNum);
13660
Emilian Peev7650c122017-01-19 08:24:33 -080013661 notifyErrorFoPendingDepthData(mDepthChannel);
13662
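    // For requests older than the oldest pending capture request, metadata has
    // already been sent, so only per-buffer errors are reported below; newer
    // requests get a full request error (error notify plus error buffers).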
Thierry Strudel3d639192016-09-09 11:52:26 -070013663 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13664 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13665
13666 if (req->frame_number < frameNum) {
13667 // Send Error notify to frameworks for each buffer for which
13668 // metadata buffer is already sent
13669 LOGH("Sending ERROR BUFFER for frame %d for %zu buffer(s)",
13670 req->frame_number, req->mPendingBufferList.size());
13671
13672 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13673 if (NULL == pStream_Buf) {
13674 LOGE("No memory for pending buffers array");
13675 return NO_MEMORY;
13676 }
13677 memset(pStream_Buf, 0,
13678 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13679 result.result = NULL;
13680 result.frame_number = req->frame_number;
13681 result.num_output_buffers = req->mPendingBufferList.size();
13682 result.output_buffers = pStream_Buf;
13683
13684 size_t index = 0;
13685 for (auto info = req->mPendingBufferList.begin();
13686 info != req->mPendingBufferList.end(); ) {
13687
13688 camera3_notify_msg_t notify_msg;
13689 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13690 notify_msg.type = CAMERA3_MSG_ERROR;
13691 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13692 notify_msg.message.error.error_stream = info->stream;
13693 notify_msg.message.error.frame_number = req->frame_number;
13694 pStream_Buf[index].acquire_fence = -1;
13695 pStream_Buf[index].release_fence = -1;
13696 pStream_Buf[index].buffer = info->buffer;
13697 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13698 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013699 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013700 index++;
13701 // Remove buffer from list
13702 info = req->mPendingBufferList.erase(info);
13703 }
13704
13705 // Remove this request from Map
13706 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13707 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13708 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13709
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013710 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013711
13712 delete [] pStream_Buf;
13713 } else {
13714
13715 // Go through the pending requests info and send error request to framework
13716 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13717
13718 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13719
13720 // Send error notify to frameworks
13721 camera3_notify_msg_t notify_msg;
13722 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13723 notify_msg.type = CAMERA3_MSG_ERROR;
13724 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13725 notify_msg.message.error.error_stream = NULL;
13726 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013727 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013728
13729 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13730 if (NULL == pStream_Buf) {
13731 LOGE("No memory for pending buffers array");
13732 return NO_MEMORY;
13733 }
13734 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13735
13736 result.result = NULL;
13737 result.frame_number = req->frame_number;
13738 result.input_buffer = i->input_buffer;
13739 result.num_output_buffers = req->mPendingBufferList.size();
13740 result.output_buffers = pStream_Buf;
13741
13742 size_t index = 0;
13743 for (auto info = req->mPendingBufferList.begin();
13744 info != req->mPendingBufferList.end(); ) {
13745 pStream_Buf[index].acquire_fence = -1;
13746 pStream_Buf[index].release_fence = -1;
13747 pStream_Buf[index].buffer = info->buffer;
13748 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13749 pStream_Buf[index].stream = info->stream;
13750 index++;
13751 // Remove buffer from list
13752 info = req->mPendingBufferList.erase(info);
13753 }
13754
13755 // Remove this request from Map
13756 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13757 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13758 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13759
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013760 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013761 delete [] pStream_Buf;
13762 i = erasePendingRequest(i);
13763 }
13764 }
13765
13766 /* Reset pending frame Drop list and requests list */
13767 mPendingFrameDropList.clear();
13768
13769 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13770 req.mPendingBufferList.clear();
13771 }
13772 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013773 LOGH("Cleared all the pending buffers ");
13774
13775 return rc;
13776}
13777
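/*===========================================================================
 * FUNCTION : isOnEncoder
 *
 * DESCRIPTION: Checks whether a stream of the given dimensions must take the
 *              encoder path, i.e. it exceeds the maximum viewfinder size or
 *              the 4K video dimensions.
 *
 * PARAMETERS :
 *   @max_viewfinder_size : maximum viewfinder dimensions
 *   @width : stream width
 *   @height : stream height
 *
 * RETURN : true if the stream needs the encoder path, false otherwise
 *==========================================================================*/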
13778bool QCamera3HardwareInterface::isOnEncoder(
13779 const cam_dimension_t max_viewfinder_size,
13780 uint32_t width, uint32_t height)
13781{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013782 return ((width > (uint32_t)max_viewfinder_size.width) ||
13783 (height > (uint32_t)max_viewfinder_size.height) ||
13784 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13785 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013786}
13787
13788/*===========================================================================
13789 * FUNCTION : setBundleInfo
13790 *
13791 * DESCRIPTION: Set bundle info for all streams that are bundle.
13792 *
13793 * PARAMETERS : None
13794 *
13795 * RETURN : NO_ERROR on success
13796 * Error codes on failure
13797 *==========================================================================*/
13798int32_t QCamera3HardwareInterface::setBundleInfo()
13799{
13800 int32_t rc = NO_ERROR;
13801
13802 if (mChannelHandle) {
13803 cam_bundle_config_t bundleInfo;
13804 memset(&bundleInfo, 0, sizeof(bundleInfo));
13805 rc = mCameraHandle->ops->get_bundle_info(
13806 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13807 if (rc != NO_ERROR) {
13808 LOGE("get_bundle_info failed");
13809 return rc;
13810 }
13811 if (mAnalysisChannel) {
13812 mAnalysisChannel->setBundleInfo(bundleInfo);
13813 }
13814 if (mSupportChannel) {
13815 mSupportChannel->setBundleInfo(bundleInfo);
13816 }
13817 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13818 it != mStreamInfo.end(); it++) {
13819 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13820 channel->setBundleInfo(bundleInfo);
13821 }
13822 if (mRawDumpChannel) {
13823 mRawDumpChannel->setBundleInfo(bundleInfo);
13824 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013825 if (mHdrPlusRawSrcChannel) {
13826 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13827 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013828 }
13829
13830 return rc;
13831}
13832
13833/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013834 * FUNCTION : setInstantAEC
13835 *
13836 * DESCRIPTION: Set Instant AEC related params.
13837 *
13838 * PARAMETERS :
13839 * @meta: CameraMetadata reference
13840 *
13841 * RETURN : NO_ERROR on success
13842 * Error codes on failure
13843 *==========================================================================*/
13844int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13845{
13846 int32_t rc = NO_ERROR;
13847 uint8_t val = 0;
13848 char prop[PROPERTY_VALUE_MAX];
13849
13850 // First try to configure instant AEC from framework metadata
13851 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13852 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13853 }
13854
13855 // If framework did not set this value, try to read from set prop.
13856 if (val == 0) {
13857 memset(prop, 0, sizeof(prop));
13858 property_get("persist.camera.instant.aec", prop, "0");
13859 val = (uint8_t)atoi(prop);
13860 }
13861
13862 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13863 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13865 mInstantAEC = val;
13866 mInstantAECSettledFrameNumber = 0;
13867 mInstantAecFrameIdxCount = 0;
13868 LOGH("instantAEC value set %d",val);
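        // persist.camera.ae.instant.bound appears to bound how many frames are
        // skipped for display while instant AEC converges (default 10).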
13869 if (mInstantAEC) {
13870 memset(prop, 0, sizeof(prop));
13871 property_get("persist.camera.ae.instant.bound", prop, "10");
13872 int32_t aec_frame_skip_cnt = atoi(prop);
13873 if (aec_frame_skip_cnt >= 0) {
13874 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13875 } else {
13876 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13877 rc = BAD_VALUE;
13878 }
13879 }
13880 } else {
13881 LOGE("Bad instant aec value set %d", val);
13882 rc = BAD_VALUE;
13883 }
13884 return rc;
13885}
13886
13887/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013888 * FUNCTION : get_num_overall_buffers
13889 *
13890 * DESCRIPTION: Estimate number of pending buffers across all requests.
13891 *
13892 * PARAMETERS : None
13893 *
13894 * RETURN : Number of overall pending buffers
13895 *
13896 *==========================================================================*/
13897uint32_t PendingBuffersMap::get_num_overall_buffers()
13898{
13899 uint32_t sum_buffers = 0;
13900 for (auto &req : mPendingBuffersInRequest) {
13901 sum_buffers += req.mPendingBufferList.size();
13902 }
13903 return sum_buffers;
13904}
13905
13906/*===========================================================================
13907 * FUNCTION : removeBuf
13908 *
13909 * DESCRIPTION: Remove a matching buffer from tracker.
13910 *
13911 * PARAMETERS : @buffer: image buffer for the callback
13912 *
13913 * RETURN : None
13914 *
13915 *==========================================================================*/
13916void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13917{
13918 bool buffer_found = false;
13919 for (auto req = mPendingBuffersInRequest.begin();
13920 req != mPendingBuffersInRequest.end(); req++) {
13921 for (auto k = req->mPendingBufferList.begin();
13922 k != req->mPendingBufferList.end(); k++ ) {
13923 if (k->buffer == buffer) {
13924 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13925 req->frame_number, buffer);
13926 k = req->mPendingBufferList.erase(k);
13927 if (req->mPendingBufferList.empty()) {
13928 // Remove this request from Map
13929 req = mPendingBuffersInRequest.erase(req);
13930 }
13931 buffer_found = true;
13932 break;
13933 }
13934 }
13935 if (buffer_found) {
13936 break;
13937 }
13938 }
13939 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13940 get_num_overall_buffers());
13941}
13942
13943/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013944 * FUNCTION : getBufErrStatus
13945 *
13946 * DESCRIPTION: get buffer error status
13947 *
13948 * PARAMETERS : @buffer: buffer handle
13949 *
13950 * RETURN : Error status
13951 *
13952 *==========================================================================*/
13953int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13954{
13955 for (auto& req : mPendingBuffersInRequest) {
13956 for (auto& k : req.mPendingBufferList) {
13957 if (k.buffer == buffer)
13958 return k.bufStatus;
13959 }
13960 }
13961 return CAMERA3_BUFFER_STATUS_OK;
13962}
13963
13964/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013965 * FUNCTION : setPAAFSupport
13966 *
13967 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13968 * feature mask according to stream type and filter
13969 * arrangement
13970 *
13971 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13972 * @stream_type: stream type
13973 * @filter_arrangement: filter arrangement
13974 *
13975 * RETURN : None
13976 *==========================================================================*/
13977void QCamera3HardwareInterface::setPAAFSupport(
13978 cam_feature_mask_t& feature_mask,
13979 cam_stream_type_t stream_type,
13980 cam_color_filter_arrangement_t filter_arrangement)
13981{
Thierry Strudel3d639192016-09-09 11:52:26 -070013982 switch (filter_arrangement) {
13983 case CAM_FILTER_ARRANGEMENT_RGGB:
13984 case CAM_FILTER_ARRANGEMENT_GRBG:
13985 case CAM_FILTER_ARRANGEMENT_GBRG:
13986 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013987 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13988 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013989 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013990 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13991 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070013992 }
13993 break;
13994 case CAM_FILTER_ARRANGEMENT_Y:
13995 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13996 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13997 }
13998 break;
13999 default:
14000 break;
14001 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014002 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14003 feature_mask, stream_type, filter_arrangement);
14004
14005
Thierry Strudel3d639192016-09-09 11:52:26 -070014006}
14007
14008/*===========================================================================
14009* FUNCTION : getSensorMountAngle
14010*
14011* DESCRIPTION: Retrieve sensor mount angle
14012*
14013* PARAMETERS : None
14014*
14015* RETURN : sensor mount angle in uint32_t
14016*==========================================================================*/
14017uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14018{
14019 return gCamCapability[mCameraId]->sensor_mount_angle;
14020}
14021
14022/*===========================================================================
14023* FUNCTION : getRelatedCalibrationData
14024*
14025* DESCRIPTION: Retrieve related system calibration data
14026*
14027* PARAMETERS : None
14028*
14029* RETURN : Pointer of related system calibration data
14030*==========================================================================*/
14031const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14032{
14033 return (const cam_related_system_calibration_data_t *)
14034 &(gCamCapability[mCameraId]->related_cam_calibration);
14035}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014036
14037/*===========================================================================
14038 * FUNCTION : is60HzZone
14039 *
14040 * DESCRIPTION: Whether the device is in a region with 60Hz mains power frequency
14041 *
14042 * PARAMETERS : None
14043 *
14044 * RETURN : True if in 60Hz zone, False otherwise
14045 *==========================================================================*/
14046bool QCamera3HardwareInterface::is60HzZone()
14047{
14048 time_t t = time(NULL);
14049 struct tm lt;
14050
14051 struct tm* r = localtime_r(&t, &lt);
14052
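    // Heuristic: UTC offsets at or below -2h or at or above +8h are treated as
    // 60Hz regions; offsets in between are assumed to be 50Hz. Defaults to
    // 60Hz when local time is unavailable.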
14053 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14054 return true;
14055 else
14056 return false;
14057}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014058
14059/*===========================================================================
14060 * FUNCTION : adjustBlackLevelForCFA
14061 *
14062 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14063 * of bayer CFA (Color Filter Array).
14064 *
14065 * PARAMETERS : @input: black level pattern in the order of RGGB
14066 * @output: black level pattern in the order of CFA
14067 * @color_arrangement: CFA color arrangement
14068 *
14069 * RETURN : None
14070 *==========================================================================*/
14071template<typename T>
14072void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14073 T input[BLACK_LEVEL_PATTERN_CNT],
14074 T output[BLACK_LEVEL_PATTERN_CNT],
14075 cam_color_filter_arrangement_t color_arrangement)
14076{
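    // input[] is in RGGB order (R, Gr, Gb, B); reorder it to match the sensor
    // CFA. For example, for GRBG the output becomes
    // { Gr, R, B, Gb } = { input[1], input[0], input[3], input[2] }.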
14077 switch (color_arrangement) {
14078 case CAM_FILTER_ARRANGEMENT_GRBG:
14079 output[0] = input[1];
14080 output[1] = input[0];
14081 output[2] = input[3];
14082 output[3] = input[2];
14083 break;
14084 case CAM_FILTER_ARRANGEMENT_GBRG:
14085 output[0] = input[2];
14086 output[1] = input[3];
14087 output[2] = input[0];
14088 output[3] = input[1];
14089 break;
14090 case CAM_FILTER_ARRANGEMENT_BGGR:
14091 output[0] = input[3];
14092 output[1] = input[2];
14093 output[2] = input[1];
14094 output[3] = input[0];
14095 break;
14096 case CAM_FILTER_ARRANGEMENT_RGGB:
14097 output[0] = input[0];
14098 output[1] = input[1];
14099 output[2] = input[2];
14100 output[3] = input[3];
14101 break;
14102 default:
14103 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14104 break;
14105 }
14106}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014107
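/*===========================================================================
 * FUNCTION : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copies JPEG related settings (GPS info, orientation, quality,
 *              thumbnail size) and the capture intent from the original HDR+
 *              request settings into the result metadata, since the HDR+
 *              result metadata belongs to a ZSL buffer and does not carry them.
 *
 * PARAMETERS :
 *   @resultMetadata : result metadata to be updated
 *   @settings : HAL metadata of the original HDR+ still capture request
 *
 * RETURN : None
 *==========================================================================*/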
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014108void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14109 CameraMetadata &resultMetadata,
14110 std::shared_ptr<metadata_buffer_t> settings)
14111{
14112 if (settings == nullptr) {
14113 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14114 return;
14115 }
14116
14117 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14118 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14119 }
14120
14121 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14122 String8 str((const char *)gps_methods);
14123 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14124 }
14125
14126 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14127 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14128 }
14129
14130 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14131 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14132 }
14133
14134 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14135 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14136 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14137 }
14138
14139 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14140 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14141 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14142 }
14143
14144 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14145 int32_t fwk_thumb_size[2];
14146 fwk_thumb_size[0] = thumb_size->width;
14147 fwk_thumb_size[1] = thumb_size->height;
14148 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14149 }
14150
14151 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14152 uint8_t fwk_intent = intent[0];
14153 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14154 }
14155}
14156
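/*===========================================================================
 * FUNCTION : trySubmittingHdrPlusRequest
 *
 * DESCRIPTION: Checks whether a capture request qualifies for HDR+ (high
 *              quality noise reduction and edge modes, a single JPEG output)
 *              and, if so, submits it to the HDR+ service using a YUV buffer
 *              obtained from the pic channel.
 *
 * PARAMETERS :
 *   @hdrPlusRequest : pending HDR+ request, filled in on success
 *   @request : capture request from the framework
 *   @metadata : capture request settings
 *
 * RETURN : true if the request was submitted as an HDR+ request
 *          false otherwise
 *==========================================================================*/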
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014157bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
14158 const camera3_capture_request_t &request, const CameraMetadata &metadata)
14159{
14160 if (hdrPlusRequest == nullptr) return false;
14161
14162 // Check noise reduction mode is high quality.
14163 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14164 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14165 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014166 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14167 metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ?
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] : -1);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014168 return false;
14169 }
14170
14171 // Check edge mode is high quality.
14172 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14173 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14174 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14175 return false;
14176 }
14177
14178 if (request.num_output_buffers != 1 ||
14179 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14180 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014181 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14182 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14183 request.output_buffers[i].stream->width,
14184 request.output_buffers[i].stream->height,
14185 request.output_buffers[i].stream->format);
14186 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014187 return false;
14188 }
14189
14190 // Get a YUV buffer from pic channel.
14191 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14192 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14193 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14194 if (res != OK) {
14195 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14196 __FUNCTION__, strerror(-res), res);
14197 return false;
14198 }
14199
14200 pbcamera::StreamBuffer buffer;
14201 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014202 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014203 buffer.data = yuvBuffer->buffer;
14204 buffer.dataSize = yuvBuffer->frame_len;
14205
14206 pbcamera::CaptureRequest pbRequest;
14207 pbRequest.id = request.frame_number;
14208 pbRequest.outputBuffers.push_back(buffer);
14209
14210 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014211 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014212 if (res != OK) {
14213 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14214 strerror(-res), res);
14215 return false;
14216 }
14217
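    // Keep the YUV buffer and the framework output buffer so they can be
    // returned and encoded when the HDR+ capture result arrives.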
14218 hdrPlusRequest->yuvBuffer = yuvBuffer;
14219 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14220
14221 return true;
14222}
14223
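/*===========================================================================
 * FUNCTION : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Connects to the HDR+ service if not already connected, sets the
 *              static metadata, configures the HDR+ streams and enables ZSL
 *              HDR+ mode so Easel starts capturing ZSL raw buffers. Assumed to
 *              be called with the relevant HAL lock held (per the Locked
 *              suffix).
 *
 * PARAMETERS : None
 *
 * RETURN : OK on success, error code otherwise
 *==========================================================================*/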
Chien-Yu Chenee335912017-02-09 17:53:20 -080014224status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14225{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014226 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014227 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
14228 return -ENODEV;
14229 }
14230
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014231 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014232
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014233 // Connect to HDR+ service if it's not connected yet.
14234 pthread_mutex_lock(&gCamLock);
14235 if (!gEaselConnected) {
14236 // Connect to HDR+ service
14237 res = gHdrPlusClient->connect(this);
14238 if (res != OK) {
14239 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14240 strerror(-res), res);
14241 pthread_mutex_unlock(&gCamLock);
14242 return res;
14243 }
14244
14245 // Set static metadata.
14246 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14247 if (res != OK) {
14248 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14249 strerror(-res), res);
14250 gHdrPlusClient->disconnect();
14251 pthread_mutex_unlock(&gCamLock);
14252 return res;
14253 }
14254 gEaselConnected = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014255 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014256 pthread_mutex_unlock(&gCamLock);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014257
14258 // Configure stream for HDR+.
14259 res = configureHdrPlusStreamsLocked();
14260 if (res != OK) {
14261 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014262 return res;
14263 }
14264
14265 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14266 res = gHdrPlusClient->setZslHdrPlusMode(true);
14267 if (res != OK) {
14268 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014269 return res;
14270 }
14271
14272 mHdrPlusModeEnabled = true;
14273 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14274
14275 return OK;
14276}
14277
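/*===========================================================================
 * FUNCTION : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disables ZSL HDR+ mode in the HDR+ client, if it was enabled,
 *              and clears mHdrPlusModeEnabled.
 *
 * PARAMETERS : None
 *
 * RETURN : None
 *==========================================================================*/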
14278void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14279{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014280 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014281 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014282 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14283 if (res != OK) {
14284 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14285 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014286 }
14287
14288 mHdrPlusModeEnabled = false;
14289 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14290}
14291
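/*===========================================================================
 * FUNCTION : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Builds the HDR+ input configuration (either the HAL RAW source
 *              channel or direct sensor MIPI input) and the YUV output
 *              configuration, then configures the streams with the HDR+
 *              client.
 *
 * PARAMETERS : None
 *
 * RETURN : OK on success, error code otherwise
 *==========================================================================*/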
14292status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014293{
14294 pbcamera::InputConfiguration inputConfig;
14295 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14296 status_t res = OK;
14297
14298 // Configure HDR+ client streams.
14299 // Get input config.
14300 if (mHdrPlusRawSrcChannel) {
14301 // HDR+ input buffers will be provided by HAL.
14302 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14303 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14304 if (res != OK) {
14305 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14306 __FUNCTION__, strerror(-res), res);
14307 return res;
14308 }
14309
14310 inputConfig.isSensorInput = false;
14311 } else {
14312 // Sensor MIPI will send data to Easel.
14313 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014314 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014315 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14316 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14317 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14318 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14319 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14320 if (mSensorModeInfo.num_raw_bits != 10) {
14321 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14322 mSensorModeInfo.num_raw_bits);
14323 return BAD_VALUE;
14324 }
14325
14326 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014327 }
14328
14329 // Get output configurations.
14330 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014331 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014332
14333 // Easel may need to output YUV output buffers if mPictureChannel was created.
14334 pbcamera::StreamConfiguration yuvOutputConfig;
14335 if (mPictureChannel != nullptr) {
14336 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14337 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14338 if (res != OK) {
14339 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14340 __FUNCTION__, strerror(-res), res);
14341
14342 return res;
14343 }
14344
14345 outputStreamConfigs.push_back(yuvOutputConfig);
14346 }
14347
14348 // TODO: consider other channels for YUV output buffers.
14349
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014350 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014351 if (res != OK) {
14352 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14353 strerror(-res), res);
14354 return res;
14355 }
14356
14357 return OK;
14358}
14359
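/*===========================================================================
 * FUNCTION : onCaptureResult
 *
 * DESCRIPTION: HDR+ client callback for a successful capture result. Updates
 *              the result metadata with the original request settings,
 *              optionally dumps the YUV output, returns the YUV buffer to the
 *              pic channel for JPEG encoding and sends the result metadata to
 *              the framework.
 *
 * PARAMETERS :
 *   @result : HDR+ capture result
 *   @resultMetadata : result metadata of the ZSL input buffer
 *
 * RETURN : None
 *==========================================================================*/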
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014360void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14361 const camera_metadata_t &resultMetadata) {
14362 if (result != nullptr) {
14363 if (result->outputBuffers.size() != 1) {
14364 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14365 result->outputBuffers.size());
14366 return;
14367 }
14368
14369 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14370 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14371 result->outputBuffers[0].streamId);
14372 return;
14373 }
14374
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014375 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014376 HdrPlusPendingRequest pendingRequest;
14377 {
14378 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14379 auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find the pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
14380 pendingRequest = req->second;
14381 }
14382
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014383 // Update the result metadata with the settings of the HDR+ still capture request because
14384 // the result metadata belongs to a ZSL buffer.
14385 CameraMetadata metadata;
14386 metadata = &resultMetadata;
14387 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14388 camera_metadata_t* updatedResultMetadata = metadata.release();
14389
14390 QCamera3PicChannel *picChannel =
14391 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14392
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014393 // Check if dumping HDR+ YUV output is enabled.
14394 char prop[PROPERTY_VALUE_MAX];
14395 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14396 bool dumpYuvOutput = atoi(prop);
14397
14398 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014399 // Dump yuv buffer to a ppm file.
14400 pbcamera::StreamConfiguration outputConfig;
14401 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14402 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14403 if (rc == OK) {
14404 char buf[FILENAME_MAX] = {};
14405 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14406 result->requestId, result->outputBuffers[0].streamId,
14407 outputConfig.image.width, outputConfig.image.height);
14408
14409 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14410 } else {
14411 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14412 __FUNCTION__, strerror(-rc), rc);
14413 }
14414 }
14415
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014416 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14417 auto halMetadata = std::make_shared<metadata_buffer_t>();
14418 clear_metadata_buffer(halMetadata.get());
14419
14420 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14421 // encoding.
14422 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14423 halStreamId, /*minFrameDuration*/0);
14424 if (res == OK) {
14425 // Return the buffer to pic channel for encoding.
14426 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14427 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14428 halMetadata);
14429 } else {
14430 // Return the buffer without encoding.
14431 // TODO: This should not happen but we may want to report an error buffer to camera
14432 // service.
14433 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14434 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14435 strerror(-res), res);
14436 }
14437
14438 // Send HDR+ metadata to framework.
14439 {
14440 pthread_mutex_lock(&mMutex);
14441
14442 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14443 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14444 pthread_mutex_unlock(&mMutex);
14445 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014446
14447 // Remove the HDR+ pending request.
14448 {
14449 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14450 auto req = mHdrPlusPendingRequests.find(result->requestId);
14451 mHdrPlusPendingRequests.erase(req);
14452 }
14453 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014454}
14455
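/*===========================================================================
 * FUNCTION : onFailedCaptureResult
 *
 * DESCRIPTION: HDR+ client callback for a failed capture result. Returns the
 *              YUV buffer to the pic channel and removes the pending HDR+
 *              request.
 *
 * PARAMETERS :
 *   @failedResult : failed HDR+ capture result
 *
 * RETURN : None
 *==========================================================================*/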
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014456void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14457 // TODO: Handle HDR+ capture failures and send the failure to framework.
14458 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14459 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        ALOGE("%s: Couldn't find the pending HDR+ request for failed result %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }
14460
14461 // Return the buffer to pic channel.
14462 QCamera3PicChannel *picChannel =
14463 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14464 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14465
14466 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014467}
14468
Thierry Strudel3d639192016-09-09 11:52:26 -070014469}; //end namespace qcamera