/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials provided
 *       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

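/* Editor's illustrative note, not part of the original file: METADATA_MAP_SIZE
 * is the usual sizeof-array idiom, intended for the statically sized QCameraMap
 * tables defined below, e.g.
 *
 *     size_t count = METADATA_MAP_SIZE(EFFECT_MODES_MAP);
 *
 * It only yields the element count for a real array visible in this translation
 * unit; applied to a pointer it would silently give the wrong answer.
 */
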
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6

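/* Editor's illustrative sketch, not part of the original file (the consumer code
 * shown is an assumption): the indices above describe a flat per-face landmark
 * array packed as (x, y) pairs for left eye, right eye and mouth, so a reader
 * of that array would typically look like
 *
 *     int32_t landmarks[TOTAL_LANDMARK_INDICES];
 *     int32_t leftEyeX = landmarks[LEFT_EYE_X];
 *     int32_t leftEyeY = landmarks[LEFT_EYE_Y];
 *     int32_t mouthX   = landmarks[MOUTH_X];
 *     int32_t mouthY   = landmarks[MOUTH_Y];
 */
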
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

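/* Editor's illustrative note, not part of the original file: the table above is
 * a flat list of (width, height) pairs, where the leading {0, 0} entry means
 * "no thumbnail". Iterating it pairwise would look roughly like
 *
 *     size_t count = sizeof(available_thumbnail_sizes) / sizeof(int32_t);
 *     for (size_t i = 0; i + 1 < count; i += 2) {
 *         int32_t w = available_thumbnail_sizes[i];
 *         int32_t h = available_thumbnail_sizes[i + 1];
 *         // (w, h) is one advertised JPEG thumbnail size
 *     }
 */
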
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list matters: when mapping from HAL to Android the code
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the traversal logic selects the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

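/* Editor's illustrative sketch, not part of the original file: the "first match
 * wins" traversal described above amounts to a linear scan over one of these
 * tables. The helper below is an assumption (including the fwk_name / hal_name
 * member names), shown only to make the ordering constraint concrete:
 *
 *     template <typename fwkType, typename halType, size_t N>
 *     static bool mapHalToFwk(
 *             const QCamera3HardwareInterface::QCameraMap<fwkType, halType> (&map)[N],
 *             halType halValue, fwkType *fwkValue) {
 *         for (size_t i = 0; i < N; i++) {
 *             if (map[i].hal_name == halValue) { // earliest matching row wins
 *                 *fwkValue = map[i].fwk_name;
 *                 return true;
 *             }
 *         }
 *         return false;
 *     }
 */
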
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

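/* Editor's illustrative note, not part of the original file: the framework calls
 * these static entry points through camera3_device_t::ops, and the constructor
 * below stores the instance in mCameraDevice.priv, so a wrapper can recover the
 * object roughly like
 *
 *     QCamera3HardwareInterface *hw =
 *             reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
 *
 * before forwarding to the corresponding member function (a sketch, not the
 * verbatim implementation).
 */
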
// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    mIsApInputUsedForHdrPlus =
            property_get_bool("persist.camera.hdrplus.apinput", false);
    ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
            mIsApInputUsedForHdrPlus ? "AP" : "Easel");

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested is among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size, but
                 * it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to full
                 * array size, so keep the logic lenient for now.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams*/
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensor_mode_info : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001287int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001288{
1289 int32_t rc = NO_ERROR;
1290
1291 cam_dimension_t max_dim = {0, 0};
1292 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1293 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1294 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1295 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1296 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1297 }
1298
1299 clear_metadata_buffer(mParameters);
1300
1301 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1302 max_dim);
1303 if (rc != NO_ERROR) {
1304 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1305 return rc;
1306 }
1307
1308 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1309 if (rc != NO_ERROR) {
1310 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1311 return rc;
1312 }
1313
1314 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001315 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001316
1317 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1318 mParameters);
1319 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001320 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001321 return rc;
1322 }
1323
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001324 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001325 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1326 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1327 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1328 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1329 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001330
1331 return rc;
1332}
1333
1334/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001335 * FUNCTION : addToPPFeatureMask
1336 *
1337 * DESCRIPTION: add additional features to pp feature mask based on
1338 * stream type and usecase
1339 *
1340 * PARAMETERS :
1341 * @stream_format : stream type for feature mask
1342 * @stream_idx : stream idx within postprocess_mask list to change
1343 *
1344 * RETURN : NULL
1345 *
1346 *==========================================================================*/
1347void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1348 uint32_t stream_idx)
1349{
1350 char feature_mask_value[PROPERTY_VALUE_MAX];
1351 cam_feature_mask_t feature_mask;
1352 int args_converted;
1353 int property_len;
1354
1355 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001356#ifdef _LE_CAMERA_
1357 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1358 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1359 property_len = property_get("persist.camera.hal3.feature",
1360 feature_mask_value, swtnr_feature_mask_value);
1361#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001362 property_len = property_get("persist.camera.hal3.feature",
1363 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001364#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001365 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1366 (feature_mask_value[1] == 'x')) {
1367 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1368 } else {
1369 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1370 }
1371 if (1 != args_converted) {
1372 feature_mask = 0;
1373 LOGE("Wrong feature mask %s", feature_mask_value);
1374 return;
1375 }
1376
1377 switch (stream_format) {
1378 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1379 /* Add LLVD to pp feature mask only if video hint is enabled */
1380 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1381 mStreamConfigInfo.postprocess_mask[stream_idx]
1382 |= CAM_QTI_FEATURE_SW_TNR;
1383 LOGH("Added SW TNR to pp feature mask");
1384 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1385 mStreamConfigInfo.postprocess_mask[stream_idx]
1386 |= CAM_QCOM_FEATURE_LLVD;
1387 LOGH("Added LLVD SeeMore to pp feature mask");
1388 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001389 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1390 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1391 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1392 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001393 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1394 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1395 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1396 CAM_QTI_FEATURE_BINNING_CORRECTION;
1397 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001398 break;
1399 }
1400 default:
1401 break;
1402 }
1403 LOGD("PP feature mask %llx",
1404 mStreamConfigInfo.postprocess_mask[stream_idx]);
1405}
1406
1407/*==============================================================================
1408 * FUNCTION : updateFpsInPreviewBuffer
1409 *
1410 * DESCRIPTION: update FPS information in preview buffer.
1411 *
1412 * PARAMETERS :
1413 * @metadata : pointer to metadata buffer
1414 * @frame_number: frame_number to look for in pending buffer list
1415 *
1416 * RETURN : None
1417 *
1418 *==========================================================================*/
1419void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1420 uint32_t frame_number)
1421{
1422 // Mark all pending buffers for this particular request
1423 // with corresponding framerate information
1424 for (List<PendingBuffersInRequest>::iterator req =
1425 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1426 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1427 for(List<PendingBufferInfo>::iterator j =
1428 req->mPendingBufferList.begin();
1429 j != req->mPendingBufferList.end(); j++) {
1430 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1431 if ((req->frame_number == frame_number) &&
1432 (channel->getStreamTypeMask() &
1433 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1434 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1435 CAM_INTF_PARM_FPS_RANGE, metadata) {
1436 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1437 struct private_handle_t *priv_handle =
1438 (struct private_handle_t *)(*(j->buffer));
1439 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1440 }
1441 }
1442 }
1443 }
1444}
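/* Illustrative walk-through (values assumed, not taken from this file): for a request
 * whose metadata carries CAM_INTF_PARM_FPS_RANGE with max_fps = 30, the loop above
 * effectively performs, for each pending preview buffer of that frame number,
 *
 *     typeof (MetaData_t::refreshrate) cameraFps = 30;
 *     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
 *
 * handing the capture rate to the display metadata of that buffer. */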
1445
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001446/*==============================================================================
1447 * FUNCTION : updateTimeStampInPendingBuffers
1448 *
1449 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1450 * of a frame number
1451 *
1452 * PARAMETERS :
 1453 * @frameNumber : frame number whose pending buffers will receive the timestamp
1454 * @timestamp : timestamp to be set
1455 *
1456 * RETURN : None
1457 *
1458 *==========================================================================*/
1459void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1460 uint32_t frameNumber, nsecs_t timestamp)
1461{
1462 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1463 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1464 if (req->frame_number != frameNumber)
1465 continue;
1466
1467 for (auto k = req->mPendingBufferList.begin();
1468 k != req->mPendingBufferList.end(); k++ ) {
1469 struct private_handle_t *priv_handle =
1470 (struct private_handle_t *) (*(k->buffer));
1471 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1472 }
1473 }
1474 return;
1475}
1476
Thierry Strudel3d639192016-09-09 11:52:26 -07001477/*===========================================================================
1478 * FUNCTION : configureStreams
1479 *
1480 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1481 * and output streams.
1482 *
1483 * PARAMETERS :
1484 * @stream_list : streams to be configured
1485 *
 1486 * RETURN     : int type of status (NO_ERROR on success, non-zero failure code otherwise)
1487 *
1488 *==========================================================================*/
1489int QCamera3HardwareInterface::configureStreams(
1490 camera3_stream_configuration_t *streamList)
1491{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001492 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 int rc = 0;
1494
1495 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001496 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001498 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001499
1500 return rc;
1501}
1502
1503/*===========================================================================
1504 * FUNCTION : configureStreamsPerfLocked
1505 *
1506 * DESCRIPTION: configureStreams while perfLock is held.
1507 *
1508 * PARAMETERS :
1509 * @stream_list : streams to be configured
1510 *
1511 * RETURN : int32_t type of status
1512 * NO_ERROR -- success
 1513 *              non-zero failure code
1514 *==========================================================================*/
1515int QCamera3HardwareInterface::configureStreamsPerfLocked(
1516 camera3_stream_configuration_t *streamList)
1517{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001518 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001519 int rc = 0;
1520
1521 // Sanity check stream_list
1522 if (streamList == NULL) {
1523 LOGE("NULL stream configuration");
1524 return BAD_VALUE;
1525 }
1526 if (streamList->streams == NULL) {
1527 LOGE("NULL stream list");
1528 return BAD_VALUE;
1529 }
1530
1531 if (streamList->num_streams < 1) {
1532 LOGE("Bad number of streams requested: %d",
1533 streamList->num_streams);
1534 return BAD_VALUE;
1535 }
1536
1537 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1538 LOGE("Maximum number of streams %d exceeded: %d",
1539 MAX_NUM_STREAMS, streamList->num_streams);
1540 return BAD_VALUE;
1541 }
1542
1543 mOpMode = streamList->operation_mode;
1544 LOGD("mOpMode: %d", mOpMode);
1545
 1546    /* first invalidate all the streams in mStreamInfo
1547 * if they appear again, they will be validated */
1548 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1549 it != mStreamInfo.end(); it++) {
1550 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1551 if (channel) {
1552 channel->stop();
1553 }
1554 (*it)->status = INVALID;
1555 }
1556
1557 if (mRawDumpChannel) {
1558 mRawDumpChannel->stop();
1559 delete mRawDumpChannel;
1560 mRawDumpChannel = NULL;
1561 }
1562
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001563 if (mHdrPlusRawSrcChannel) {
1564 mHdrPlusRawSrcChannel->stop();
1565 delete mHdrPlusRawSrcChannel;
1566 mHdrPlusRawSrcChannel = NULL;
1567 }
1568
Thierry Strudel3d639192016-09-09 11:52:26 -07001569 if (mSupportChannel)
1570 mSupportChannel->stop();
1571
1572 if (mAnalysisChannel) {
1573 mAnalysisChannel->stop();
1574 }
1575 if (mMetadataChannel) {
 1576        /* A non-NULL mMetadataChannel means a metadata stream exists; stop it */
1577 mMetadataChannel->stop();
1578 }
1579 if (mChannelHandle) {
1580 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1581 mChannelHandle);
1582 LOGD("stopping channel %d", mChannelHandle);
1583 }
1584
1585 pthread_mutex_lock(&mMutex);
1586
1587 // Check state
1588 switch (mState) {
1589 case INITIALIZED:
1590 case CONFIGURED:
1591 case STARTED:
1592 /* valid state */
1593 break;
1594 default:
1595 LOGE("Invalid state %d", mState);
1596 pthread_mutex_unlock(&mMutex);
1597 return -ENODEV;
1598 }
1599
1600 /* Check whether we have video stream */
1601 m_bIs4KVideo = false;
1602 m_bIsVideo = false;
1603 m_bEisSupportedSize = false;
1604 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001605 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001606 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001607 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 uint32_t videoWidth = 0U;
1609 uint32_t videoHeight = 0U;
1610 size_t rawStreamCnt = 0;
1611 size_t stallStreamCnt = 0;
1612 size_t processedStreamCnt = 0;
1613 // Number of streams on ISP encoder path
1614 size_t numStreamsOnEncoder = 0;
1615 size_t numYuv888OnEncoder = 0;
1616 bool bYuv888OverrideJpeg = false;
1617 cam_dimension_t largeYuv888Size = {0, 0};
1618 cam_dimension_t maxViewfinderSize = {0, 0};
1619 bool bJpegExceeds4K = false;
1620 bool bJpegOnEncoder = false;
1621 bool bUseCommonFeatureMask = false;
1622 cam_feature_mask_t commonFeatureMask = 0;
1623 bool bSmallJpegSize = false;
1624 uint32_t width_ratio;
1625 uint32_t height_ratio;
1626 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1627 camera3_stream_t *inputStream = NULL;
1628 bool isJpeg = false;
1629 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001630 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001631
1632 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1633
1634 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001635 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001636 uint8_t eis_prop_set;
1637 uint32_t maxEisWidth = 0;
1638 uint32_t maxEisHeight = 0;
1639
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001640 // Initialize all instant AEC related variables
1641 mInstantAEC = false;
1642 mResetInstantAEC = false;
1643 mInstantAECSettledFrameNumber = 0;
1644 mAecSkipDisplayFrameBound = 0;
1645 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001646 mCurrFeatureState = 0;
1647 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001648
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1650
1651 size_t count = IS_TYPE_MAX;
1652 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1653 for (size_t i = 0; i < count; i++) {
1654 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001655 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1656 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001657 break;
1658 }
1659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 count = CAM_OPT_STAB_MAX;
1661 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1662 for (size_t i = 0; i < count; i++) {
1663 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1664 oisSupported = true;
1665 break;
1666 }
1667 }
1668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001670 maxEisWidth = MAX_EIS_WIDTH;
1671 maxEisHeight = MAX_EIS_HEIGHT;
1672 }
1673
1674 /* EIS setprop control */
1675 char eis_prop[PROPERTY_VALUE_MAX];
1676 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001677 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001678 eis_prop_set = (uint8_t)atoi(eis_prop);
1679
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001680 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001681 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1682
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001683 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1684 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
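    /* Worked examples of the rule above (inputs assumed for illustration):
     *   eis_prop_set = 1, oisSupported = 0, m_bEisSupported = 1, normal opmode -> m_bEisEnable = 1
     *   eis_prop_set = 1, oisSupported = 1 (OIS present)                       -> m_bEisEnable = 0
     *   any inputs in constrained high-speed (HFR) opmode                      -> m_bEisEnable = 0
     * m_bEisEnable may still be cleared further below for front cameras or when
     * no video stream is configured. */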
1685
Thierry Strudel3d639192016-09-09 11:52:26 -07001686 /* stream configurations */
1687 for (size_t i = 0; i < streamList->num_streams; i++) {
1688 camera3_stream_t *newStream = streamList->streams[i];
1689 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1690 "height = %d, rotation = %d, usage = 0x%x",
1691 i, newStream->stream_type, newStream->format,
1692 newStream->width, newStream->height, newStream->rotation,
1693 newStream->usage);
1694 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1695 newStream->stream_type == CAMERA3_STREAM_INPUT){
1696 isZsl = true;
1697 }
1698 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1699 inputStream = newStream;
1700 }
1701
Emilian Peev7650c122017-01-19 08:24:33 -08001702 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1703 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001704 isJpeg = true;
1705 jpegSize.width = newStream->width;
1706 jpegSize.height = newStream->height;
1707 if (newStream->width > VIDEO_4K_WIDTH ||
1708 newStream->height > VIDEO_4K_HEIGHT)
1709 bJpegExceeds4K = true;
1710 }
1711
1712 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1713 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1714 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001715 // In HAL3 we can have multiple different video streams.
1716 // The variables video width and height are used below as
1717 // dimensions of the biggest of them
1718 if (videoWidth < newStream->width ||
1719 videoHeight < newStream->height) {
1720 videoWidth = newStream->width;
1721 videoHeight = newStream->height;
1722 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001723 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1724 (VIDEO_4K_HEIGHT <= newStream->height)) {
1725 m_bIs4KVideo = true;
1726 }
1727 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1728 (newStream->height <= maxEisHeight);
1729 }
1730 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1731 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1732 switch (newStream->format) {
1733 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001734 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1735 depthPresent = true;
1736 break;
1737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001738 stallStreamCnt++;
1739 if (isOnEncoder(maxViewfinderSize, newStream->width,
1740 newStream->height)) {
1741 numStreamsOnEncoder++;
1742 bJpegOnEncoder = true;
1743 }
1744 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1745 newStream->width);
1746 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1747                    newStream->height);
1748 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1749 "FATAL: max_downscale_factor cannot be zero and so assert");
1750 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1751 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1752 LOGH("Setting small jpeg size flag to true");
1753 bSmallJpegSize = true;
1754 }
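            /* Worked example with assumed numbers: for a 4000x3000 active array and a
             * 320x240 BLOB stream, CEIL_DIVISION yields width_ratio = 13 and
             * height_ratio = 13; with max_downscale_factor = 8 both ratios exceed the
             * limit, so bSmallJpegSize is set and, for non-ZSL configurations, the
             * snapshot stream later gets CAM_QCOM_FEATURE_PP_SUPERSET_HAL3. */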
1755 break;
1756 case HAL_PIXEL_FORMAT_RAW10:
1757 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1758 case HAL_PIXEL_FORMAT_RAW16:
1759 rawStreamCnt++;
1760 break;
1761 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1762 processedStreamCnt++;
1763 if (isOnEncoder(maxViewfinderSize, newStream->width,
1764 newStream->height)) {
1765 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1766 !IS_USAGE_ZSL(newStream->usage)) {
1767 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1768 }
1769 numStreamsOnEncoder++;
1770 }
1771 break;
1772 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1773 processedStreamCnt++;
1774 if (isOnEncoder(maxViewfinderSize, newStream->width,
1775 newStream->height)) {
1776 // If Yuv888 size is not greater than 4K, set feature mask
1777 // to SUPERSET so that it support concurrent request on
1778 // YUV and JPEG.
1779 if (newStream->width <= VIDEO_4K_WIDTH &&
1780 newStream->height <= VIDEO_4K_HEIGHT) {
1781 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1782 }
1783 numStreamsOnEncoder++;
1784 numYuv888OnEncoder++;
1785 largeYuv888Size.width = newStream->width;
1786 largeYuv888Size.height = newStream->height;
1787 }
1788 break;
1789 default:
1790 processedStreamCnt++;
1791 if (isOnEncoder(maxViewfinderSize, newStream->width,
1792 newStream->height)) {
1793 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1794 numStreamsOnEncoder++;
1795 }
1796 break;
1797 }
1798
1799 }
1800 }
1801
1802 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1803 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1804 !m_bIsVideo) {
1805 m_bEisEnable = false;
1806 }
1807
Thierry Strudel54dc9782017-02-15 12:12:10 -08001808 uint8_t forceEnableTnr = 0;
1809 char tnr_prop[PROPERTY_VALUE_MAX];
1810 memset(tnr_prop, 0, sizeof(tnr_prop));
1811 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1812 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1813
Thierry Strudel3d639192016-09-09 11:52:26 -07001814 /* Logic to enable/disable TNR based on specific config size/etc.*/
1815 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1816 ((videoWidth == 1920 && videoHeight == 1080) ||
1817 (videoWidth == 1280 && videoHeight == 720)) &&
1818 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1819 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001820 else if (forceEnableTnr)
1821 m_bTnrEnabled = true;
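    /* Illustrative outcomes of the TNR decision above (setups assumed):
     *   1920x1080 video stream, m_bTnrVideo set, normal opmode -> m_bTnrEnabled = true
     *   3840x2160 video stream, m_bTnrVideo set                -> stays false (size not listed)
     *   adb shell setprop debug.camera.tnr.forceenable 1       -> m_bTnrEnabled = true regardless */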
Thierry Strudel3d639192016-09-09 11:52:26 -07001822
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001823 char videoHdrProp[PROPERTY_VALUE_MAX];
1824 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1825 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1826 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1827
1828 if (hdr_mode_prop == 1 && m_bIsVideo &&
1829 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1830 m_bVideoHdrEnabled = true;
1831 else
1832 m_bVideoHdrEnabled = false;
1833
1834
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 /* Check if num_streams is sane */
1836 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1837 rawStreamCnt > MAX_RAW_STREAMS ||
1838 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1839 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1840 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1841 pthread_mutex_unlock(&mMutex);
1842 return -EINVAL;
1843 }
1844 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001845 if (isZsl && m_bIs4KVideo) {
1846 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 pthread_mutex_unlock(&mMutex);
1848 return -EINVAL;
1849 }
1850 /* Check if stream sizes are sane */
1851 if (numStreamsOnEncoder > 2) {
1852 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1853 pthread_mutex_unlock(&mMutex);
1854 return -EINVAL;
1855 } else if (1 < numStreamsOnEncoder){
1856 bUseCommonFeatureMask = true;
1857 LOGH("Multiple streams above max viewfinder size, common mask needed");
1858 }
1859
1860 /* Check if BLOB size is greater than 4k in 4k recording case */
1861 if (m_bIs4KVideo && bJpegExceeds4K) {
1862 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1863 pthread_mutex_unlock(&mMutex);
1864 return -EINVAL;
1865 }
1866
Emilian Peev7650c122017-01-19 08:24:33 -08001867 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1868 depthPresent) {
1869 LOGE("HAL doesn't support depth streams in HFR mode!");
1870 pthread_mutex_unlock(&mMutex);
1871 return -EINVAL;
1872 }
1873
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1875 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1876 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1877 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1878 // configurations:
1879 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1880 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1881 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1882 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1883 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1884 __func__);
1885 pthread_mutex_unlock(&mMutex);
1886 return -EINVAL;
1887 }
1888
1889 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
 1890    // the YUV stream's size is strictly greater than the JPEG size, set common
1891 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1892 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1893 jpegSize.width, jpegSize.height) &&
1894 largeYuv888Size.width > jpegSize.width &&
1895 largeYuv888Size.height > jpegSize.height) {
1896 bYuv888OverrideJpeg = true;
1897 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1898 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900
1901 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1902 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1903 commonFeatureMask);
1904 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1905 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1906
1907 rc = validateStreamDimensions(streamList);
1908 if (rc == NO_ERROR) {
1909 rc = validateStreamRotations(streamList);
1910 }
1911 if (rc != NO_ERROR) {
1912 LOGE("Invalid stream configuration requested!");
1913 pthread_mutex_unlock(&mMutex);
1914 return rc;
1915 }
1916
1917 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1918 for (size_t i = 0; i < streamList->num_streams; i++) {
1919 camera3_stream_t *newStream = streamList->streams[i];
1920 LOGH("newStream type = %d, stream format = %d "
1921 "stream size : %d x %d, stream rotation = %d",
1922 newStream->stream_type, newStream->format,
1923 newStream->width, newStream->height, newStream->rotation);
1924 //if the stream is in the mStreamList validate it
1925 bool stream_exists = false;
1926 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1927 it != mStreamInfo.end(); it++) {
1928 if ((*it)->stream == newStream) {
1929 QCamera3ProcessingChannel *channel =
1930 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1931 stream_exists = true;
1932 if (channel)
1933 delete channel;
1934 (*it)->status = VALID;
1935 (*it)->stream->priv = NULL;
1936 (*it)->channel = NULL;
1937 }
1938 }
1939 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1940 //new stream
1941 stream_info_t* stream_info;
1942 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1943 if (!stream_info) {
1944 LOGE("Could not allocate stream info");
1945 rc = -ENOMEM;
1946 pthread_mutex_unlock(&mMutex);
1947 return rc;
1948 }
1949 stream_info->stream = newStream;
1950 stream_info->status = VALID;
1951 stream_info->channel = NULL;
1952 mStreamInfo.push_back(stream_info);
1953 }
1954 /* Covers Opaque ZSL and API1 F/W ZSL */
1955 if (IS_USAGE_ZSL(newStream->usage)
1956 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1957 if (zslStream != NULL) {
1958 LOGE("Multiple input/reprocess streams requested!");
1959 pthread_mutex_unlock(&mMutex);
1960 return BAD_VALUE;
1961 }
1962 zslStream = newStream;
1963 }
1964 /* Covers YUV reprocess */
1965 if (inputStream != NULL) {
1966 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1967 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1968 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1969 && inputStream->width == newStream->width
1970 && inputStream->height == newStream->height) {
1971 if (zslStream != NULL) {
 1972                    /* This scenario indicates that multiple YUV streams with the same
 1973                     * size as the input stream have been requested. Since the zsl stream
 1974                     * handle is used solely to override the size of streams that share
 1975                     * h/w streams, we just make a guess here as to which of the streams
 1976                     * is the ZSL stream; this will be refactored once there is generic
 1977                     * logic for streams sharing encoder output.
 1978                     */
1979 LOGH("Warning, Multiple ip/reprocess streams requested!");
1980 }
1981 zslStream = newStream;
1982 }
1983 }
1984 }
1985
1986 /* If a zsl stream is set, we know that we have configured at least one input or
1987 bidirectional stream */
1988 if (NULL != zslStream) {
1989 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1990 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1991 mInputStreamInfo.format = zslStream->format;
1992 mInputStreamInfo.usage = zslStream->usage;
1993 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1994 mInputStreamInfo.dim.width,
1995 mInputStreamInfo.dim.height,
1996 mInputStreamInfo.format, mInputStreamInfo.usage);
1997 }
1998
1999 cleanAndSortStreamInfo();
2000 if (mMetadataChannel) {
2001 delete mMetadataChannel;
2002 mMetadataChannel = NULL;
2003 }
2004 if (mSupportChannel) {
2005 delete mSupportChannel;
2006 mSupportChannel = NULL;
2007 }
2008
2009 if (mAnalysisChannel) {
2010 delete mAnalysisChannel;
2011 mAnalysisChannel = NULL;
2012 }
2013
2014 if (mDummyBatchChannel) {
2015 delete mDummyBatchChannel;
2016 mDummyBatchChannel = NULL;
2017 }
2018
Emilian Peev7650c122017-01-19 08:24:33 -08002019 if (mDepthChannel) {
2020 mDepthChannel = NULL;
2021 }
2022
Thierry Strudel2896d122017-02-23 19:18:03 -08002023 char is_type_value[PROPERTY_VALUE_MAX];
2024 property_get("persist.camera.is_type", is_type_value, "4");
2025 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2026
Thierry Strudel3d639192016-09-09 11:52:26 -07002027 //Create metadata channel and initialize it
2028 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2029 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2030 gCamCapability[mCameraId]->color_arrangement);
2031 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2032 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002033 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002034 if (mMetadataChannel == NULL) {
2035 LOGE("failed to allocate metadata channel");
2036 rc = -ENOMEM;
2037 pthread_mutex_unlock(&mMutex);
2038 return rc;
2039 }
2040 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2041 if (rc < 0) {
2042 LOGE("metadata channel initialization failed");
2043 delete mMetadataChannel;
2044 mMetadataChannel = NULL;
2045 pthread_mutex_unlock(&mMutex);
2046 return rc;
2047 }
2048
Thierry Strudel2896d122017-02-23 19:18:03 -08002049 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002050 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002051 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002052 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2053 /* Allocate channel objects for the requested streams */
2054 for (size_t i = 0; i < streamList->num_streams; i++) {
2055 camera3_stream_t *newStream = streamList->streams[i];
2056 uint32_t stream_usage = newStream->usage;
2057 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2058 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2059 struct camera_info *p_info = NULL;
2060 pthread_mutex_lock(&gCamLock);
2061 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2062 pthread_mutex_unlock(&gCamLock);
2063 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2064 || IS_USAGE_ZSL(newStream->usage)) &&
2065 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002066 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002068 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2069 if (bUseCommonFeatureMask)
2070 zsl_ppmask = commonFeatureMask;
2071 else
2072 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002073 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002074 if (numStreamsOnEncoder > 0)
2075 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2076 else
2077 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002079 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002081 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002082 LOGH("Input stream configured, reprocess config");
2083 } else {
2084 //for non zsl streams find out the format
2085 switch (newStream->format) {
2086 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2087 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002088 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2090 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2091 /* add additional features to pp feature mask */
2092 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2093 mStreamConfigInfo.num_streams);
2094
2095 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2096 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2097 CAM_STREAM_TYPE_VIDEO;
2098 if (m_bTnrEnabled && m_bTnrVideo) {
2099 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2100 CAM_QCOM_FEATURE_CPP_TNR;
2101 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2102 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2103 ~CAM_QCOM_FEATURE_CDS;
2104 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002105 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2106 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2107 CAM_QTI_FEATURE_PPEISCORE;
2108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002109 } else {
2110 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2111 CAM_STREAM_TYPE_PREVIEW;
2112 if (m_bTnrEnabled && m_bTnrPreview) {
2113 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2114 CAM_QCOM_FEATURE_CPP_TNR;
2115 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2116 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2117 ~CAM_QCOM_FEATURE_CDS;
2118 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002119 if(!m_bSwTnrPreview) {
2120 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2121 ~CAM_QTI_FEATURE_SW_TNR;
2122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 padding_info.width_padding = mSurfaceStridePadding;
2124 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002125 previewSize.width = (int32_t)newStream->width;
2126 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002127 }
2128 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2129 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2130 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2131 newStream->height;
2132 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2133 newStream->width;
2134 }
2135 }
2136 break;
2137 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002138 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002139 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2140 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2141 if (bUseCommonFeatureMask)
2142 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2143 commonFeatureMask;
2144 else
2145 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2146 CAM_QCOM_FEATURE_NONE;
2147 } else {
2148 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2149 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2150 }
2151 break;
2152 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002153 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002154 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2155 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2156 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2157 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2158 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002159 /* Remove rotation if it is not supported
2160 for 4K LiveVideo snapshot case (online processing) */
2161 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2162 CAM_QCOM_FEATURE_ROTATION)) {
2163 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2164 &= ~CAM_QCOM_FEATURE_ROTATION;
2165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 } else {
2167 if (bUseCommonFeatureMask &&
2168 isOnEncoder(maxViewfinderSize, newStream->width,
2169 newStream->height)) {
2170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2171 } else {
2172 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2173 }
2174 }
2175 if (isZsl) {
2176 if (zslStream) {
2177 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2178 (int32_t)zslStream->width;
2179 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2180 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002181 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2182 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 } else {
2184 LOGE("Error, No ZSL stream identified");
2185 pthread_mutex_unlock(&mMutex);
2186 return -EINVAL;
2187 }
2188 } else if (m_bIs4KVideo) {
2189 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2190 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2191 } else if (bYuv888OverrideJpeg) {
2192 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2193 (int32_t)largeYuv888Size.width;
2194 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2195 (int32_t)largeYuv888Size.height;
2196 }
2197 break;
2198 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2199 case HAL_PIXEL_FORMAT_RAW16:
2200 case HAL_PIXEL_FORMAT_RAW10:
2201 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2202 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2203 isRawStreamRequested = true;
2204 break;
2205 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002206 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002207 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2208 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2209 break;
2210 }
2211 }
2212
2213 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2214 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2215 gCamCapability[mCameraId]->color_arrangement);
2216
2217 if (newStream->priv == NULL) {
2218 //New stream, construct channel
2219 switch (newStream->stream_type) {
2220 case CAMERA3_STREAM_INPUT:
2221 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2222 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2223 break;
2224 case CAMERA3_STREAM_BIDIRECTIONAL:
2225 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2226 GRALLOC_USAGE_HW_CAMERA_WRITE;
2227 break;
2228 case CAMERA3_STREAM_OUTPUT:
 2229                /* For video encoding streams, set the read/write-rarely
 2230                 * flags so that the buffers may be allocated un-cached */
2231 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2232 newStream->usage |=
2233 (GRALLOC_USAGE_SW_READ_RARELY |
2234 GRALLOC_USAGE_SW_WRITE_RARELY |
2235 GRALLOC_USAGE_HW_CAMERA_WRITE);
2236 else if (IS_USAGE_ZSL(newStream->usage))
2237 {
2238 LOGD("ZSL usage flag skipping");
2239 }
2240 else if (newStream == zslStream
2241 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2242 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2243 } else
2244 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2245 break;
2246 default:
2247 LOGE("Invalid stream_type %d", newStream->stream_type);
2248 break;
2249 }
2250
2251 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2252 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2253 QCamera3ProcessingChannel *channel = NULL;
2254 switch (newStream->format) {
2255 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2256 if ((newStream->usage &
2257 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2258 (streamList->operation_mode ==
2259 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2260 ) {
2261 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2262 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002263 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002264 this,
2265 newStream,
2266 (cam_stream_type_t)
2267 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2268 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2269 mMetadataChannel,
2270 0); //heap buffers are not required for HFR video channel
2271 if (channel == NULL) {
2272 LOGE("allocation of channel failed");
2273 pthread_mutex_unlock(&mMutex);
2274 return -ENOMEM;
2275 }
 2276                        // channel->getNumBuffers() will return 0 here, so use
 2277                        // MAX_INFLIGHT_HFR_REQUESTS instead
2278 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2279 newStream->priv = channel;
2280 LOGI("num video buffers in HFR mode: %d",
2281 MAX_INFLIGHT_HFR_REQUESTS);
2282 } else {
2283 /* Copy stream contents in HFR preview only case to create
2284 * dummy batch channel so that sensor streaming is in
2285 * HFR mode */
2286 if (!m_bIsVideo && (streamList->operation_mode ==
2287 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2288 mDummyBatchStream = *newStream;
2289 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002290 int bufferCount = MAX_INFLIGHT_REQUESTS;
2291 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2292 CAM_STREAM_TYPE_VIDEO) {
2293 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2294 bufferCount = MAX_VIDEO_BUFFERS;
2295 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2297 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002298 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002299 this,
2300 newStream,
2301 (cam_stream_type_t)
2302 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2304 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002305 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002306 if (channel == NULL) {
2307 LOGE("allocation of channel failed");
2308 pthread_mutex_unlock(&mMutex);
2309 return -ENOMEM;
2310 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002311 /* disable UBWC for preview, though supported,
2312 * to take advantage of CPP duplication */
2313 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2314 (previewSize.width == (int32_t)videoWidth)&&
2315 (previewSize.height == (int32_t)videoHeight)){
2316 channel->setUBWCEnabled(false);
2317 }else {
2318 channel->setUBWCEnabled(true);
2319 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 newStream->max_buffers = channel->getNumBuffers();
2321 newStream->priv = channel;
2322 }
2323 break;
2324 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2325 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2326 mChannelHandle,
2327 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002328 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 this,
2330 newStream,
2331 (cam_stream_type_t)
2332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2333 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2334 mMetadataChannel);
2335 if (channel == NULL) {
2336 LOGE("allocation of YUV channel failed");
2337 pthread_mutex_unlock(&mMutex);
2338 return -ENOMEM;
2339 }
2340 newStream->max_buffers = channel->getNumBuffers();
2341 newStream->priv = channel;
2342 break;
2343 }
2344 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2345 case HAL_PIXEL_FORMAT_RAW16:
2346 case HAL_PIXEL_FORMAT_RAW10:
2347 mRawChannel = new QCamera3RawChannel(
2348 mCameraHandle->camera_handle, mChannelHandle,
2349 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002350 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 this, newStream,
2352 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2353 mMetadataChannel,
2354 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2355 if (mRawChannel == NULL) {
2356 LOGE("allocation of raw channel failed");
2357 pthread_mutex_unlock(&mMutex);
2358 return -ENOMEM;
2359 }
2360 newStream->max_buffers = mRawChannel->getNumBuffers();
2361 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2362 break;
2363 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002364 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2365 mDepthChannel = new QCamera3DepthChannel(
2366 mCameraHandle->camera_handle, mChannelHandle,
2367 mCameraHandle->ops, NULL, NULL, &padding_info,
2368 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2369 mMetadataChannel);
2370 if (NULL == mDepthChannel) {
2371 LOGE("Allocation of depth channel failed");
2372 pthread_mutex_unlock(&mMutex);
2373 return NO_MEMORY;
2374 }
2375 newStream->priv = mDepthChannel;
2376 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2377 } else {
2378 // Max live snapshot inflight buffer is 1. This is to mitigate
2379 // frame drop issues for video snapshot. The more buffers being
2380 // allocated, the more frame drops there are.
2381 mPictureChannel = new QCamera3PicChannel(
2382 mCameraHandle->camera_handle, mChannelHandle,
2383 mCameraHandle->ops, captureResultCb,
2384 setBufferErrorStatus, &padding_info, this, newStream,
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2386 m_bIs4KVideo, isZsl, mMetadataChannel,
2387 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2388 if (mPictureChannel == NULL) {
2389 LOGE("allocation of channel failed");
2390 pthread_mutex_unlock(&mMutex);
2391 return -ENOMEM;
2392 }
2393 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2394 newStream->max_buffers = mPictureChannel->getNumBuffers();
2395 mPictureChannel->overrideYuvSize(
2396 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 break;
2400
2401 default:
2402 LOGE("not a supported format 0x%x", newStream->format);
2403 break;
2404 }
2405 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2406 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2407 } else {
2408 LOGE("Error, Unknown stream type");
2409 pthread_mutex_unlock(&mMutex);
2410 return -EINVAL;
2411 }
2412
2413 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2414 if (channel != NULL && channel->isUBWCEnabled()) {
2415 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002416 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2417 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2419 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2420 }
2421 }
2422
2423 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2424 it != mStreamInfo.end(); it++) {
2425 if ((*it)->stream == newStream) {
2426 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2427 break;
2428 }
2429 }
2430 } else {
2431 // Channel already exists for this stream
2432 // Do nothing for now
2433 }
2434 padding_info = gCamCapability[mCameraId]->padding_info;
2435
Emilian Peev7650c122017-01-19 08:24:33 -08002436        /* Do not add entries for input and depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002437         * since there is no real stream associated with them
2438 */
Emilian Peev7650c122017-01-19 08:24:33 -08002439 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2440 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002442 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 }
2444
Thierry Strudel2896d122017-02-23 19:18:03 -08002445 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2446 onlyRaw = false;
2447 }
2448
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002449 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002450 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002451 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002452 cam_analysis_info_t analysisInfo;
2453 int32_t ret = NO_ERROR;
2454 ret = mCommon.getAnalysisInfo(
2455 FALSE,
2456 analysisFeatureMask,
2457 &analysisInfo);
2458 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002459 cam_color_filter_arrangement_t analysis_color_arrangement =
2460 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2461 CAM_FILTER_ARRANGEMENT_Y :
2462 gCamCapability[mCameraId]->color_arrangement);
2463 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2464 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002465 cam_dimension_t analysisDim;
2466 analysisDim = mCommon.getMatchingDimension(previewSize,
2467 analysisInfo.analysis_recommended_res);
2468
2469 mAnalysisChannel = new QCamera3SupportChannel(
2470 mCameraHandle->camera_handle,
2471 mChannelHandle,
2472 mCameraHandle->ops,
2473 &analysisInfo.analysis_padding_info,
2474 analysisFeatureMask,
2475 CAM_STREAM_TYPE_ANALYSIS,
2476 &analysisDim,
2477 (analysisInfo.analysis_format
2478 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2479 : CAM_FORMAT_YUV_420_NV21),
2480 analysisInfo.hw_analysis_supported,
2481 gCamCapability[mCameraId]->color_arrangement,
2482 this,
2483 0); // force buffer count to 0
2484 } else {
2485 LOGW("getAnalysisInfo failed, ret = %d", ret);
2486 }
2487 if (!mAnalysisChannel) {
2488 LOGW("Analysis channel cannot be created");
2489 }
2490 }
2491
Thierry Strudel3d639192016-09-09 11:52:26 -07002492 //RAW DUMP channel
2493 if (mEnableRawDump && isRawStreamRequested == false){
2494 cam_dimension_t rawDumpSize;
2495 rawDumpSize = getMaxRawSize(mCameraId);
2496 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2497 setPAAFSupport(rawDumpFeatureMask,
2498 CAM_STREAM_TYPE_RAW,
2499 gCamCapability[mCameraId]->color_arrangement);
2500 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2501 mChannelHandle,
2502 mCameraHandle->ops,
2503 rawDumpSize,
2504 &padding_info,
2505 this, rawDumpFeatureMask);
2506 if (!mRawDumpChannel) {
2507 LOGE("Raw Dump channel cannot be created");
2508 pthread_mutex_unlock(&mMutex);
2509 return -ENOMEM;
2510 }
2511 }
2512
Chien-Yu Chenee335912017-02-09 17:53:20 -08002513 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002514 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002515 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002516 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2517 "HDR+ RAW source channel is not created.",
2518 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002519 } else {
2520 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2521 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2522 setPAAFSupport(hdrPlusRawFeatureMask,
2523 CAM_STREAM_TYPE_RAW,
2524 gCamCapability[mCameraId]->color_arrangement);
2525 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2526 mChannelHandle,
2527 mCameraHandle->ops,
2528 rawSize,
2529 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002530 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002531 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002532 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002533 if (!mHdrPlusRawSrcChannel) {
2534 LOGE("HDR+ Raw Source channel cannot be created");
2535 pthread_mutex_unlock(&mMutex);
2536 return -ENOMEM;
2537 }
2538 }
2539 }
2540
Thierry Strudel3d639192016-09-09 11:52:26 -07002541 if (mAnalysisChannel) {
2542 cam_analysis_info_t analysisInfo;
2543 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2544 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2545 CAM_STREAM_TYPE_ANALYSIS;
2546 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2547 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002548 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002549 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2550 &analysisInfo);
2551 if (rc != NO_ERROR) {
2552 LOGE("getAnalysisInfo failed, ret = %d", rc);
2553 pthread_mutex_unlock(&mMutex);
2554 return rc;
2555 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002556 cam_color_filter_arrangement_t analysis_color_arrangement =
2557 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2558 CAM_FILTER_ARRANGEMENT_Y :
2559 gCamCapability[mCameraId]->color_arrangement);
2560 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2561 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2562 analysis_color_arrangement);
2563
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002565 mCommon.getMatchingDimension(previewSize,
2566 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002567 mStreamConfigInfo.num_streams++;
2568 }
2569
Thierry Strudel2896d122017-02-23 19:18:03 -08002570 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 cam_analysis_info_t supportInfo;
2572 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2573 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2574 setPAAFSupport(callbackFeatureMask,
2575 CAM_STREAM_TYPE_CALLBACK,
2576 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002577 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002578 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002579 if (ret != NO_ERROR) {
2580 /* Ignore the error for Mono camera
2581 * because the PAAF bit mask is only set
2582 * for CAM_STREAM_TYPE_ANALYSIS stream type
2583 */
2584 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2585 LOGW("getAnalysisInfo failed, ret = %d", ret);
2586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002587 }
2588 mSupportChannel = new QCamera3SupportChannel(
2589 mCameraHandle->camera_handle,
2590 mChannelHandle,
2591 mCameraHandle->ops,
2592 &gCamCapability[mCameraId]->padding_info,
2593 callbackFeatureMask,
2594 CAM_STREAM_TYPE_CALLBACK,
2595 &QCamera3SupportChannel::kDim,
2596 CAM_FORMAT_YUV_420_NV21,
2597 supportInfo.hw_analysis_supported,
2598 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002599 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 if (!mSupportChannel) {
2601 LOGE("dummy channel cannot be created");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 }
2606
2607 if (mSupportChannel) {
2608 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2609 QCamera3SupportChannel::kDim;
2610 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2611 CAM_STREAM_TYPE_CALLBACK;
2612 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2613 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2614 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2615 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2616 gCamCapability[mCameraId]->color_arrangement);
2617 mStreamConfigInfo.num_streams++;
2618 }
2619
2620 if (mRawDumpChannel) {
2621 cam_dimension_t rawSize;
2622 rawSize = getMaxRawSize(mCameraId);
2623 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2624 rawSize;
2625 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2626 CAM_STREAM_TYPE_RAW;
2627 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2628 CAM_QCOM_FEATURE_NONE;
2629 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2630 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2631 gCamCapability[mCameraId]->color_arrangement);
2632 mStreamConfigInfo.num_streams++;
2633 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002634
2635 if (mHdrPlusRawSrcChannel) {
2636 cam_dimension_t rawSize;
2637 rawSize = getMaxRawSize(mCameraId);
2638 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2639 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2640 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2641 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2642 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2643 gCamCapability[mCameraId]->color_arrangement);
2644 mStreamConfigInfo.num_streams++;
2645 }
2646
Thierry Strudel3d639192016-09-09 11:52:26 -07002647    /* In HFR mode, if no video stream is added, create a dummy channel so that
 2648     * the ISP can still run in batch mode even for the preview-only case. This
 2649     * channel is never started (no stream-on); it is only initialized. */
2650 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2651 !m_bIsVideo) {
2652 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2653 setPAAFSupport(dummyFeatureMask,
2654 CAM_STREAM_TYPE_VIDEO,
2655 gCamCapability[mCameraId]->color_arrangement);
2656 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2657 mChannelHandle,
2658 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002659 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 this,
2661 &mDummyBatchStream,
2662 CAM_STREAM_TYPE_VIDEO,
2663 dummyFeatureMask,
2664 mMetadataChannel);
2665 if (NULL == mDummyBatchChannel) {
 2666            LOGE("creation of mDummyBatchChannel failed. "
 2667                    "Preview will use non-HFR sensor mode");
2668 }
2669 }
2670 if (mDummyBatchChannel) {
2671 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2672 mDummyBatchStream.width;
2673 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2674 mDummyBatchStream.height;
2675 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2676 CAM_STREAM_TYPE_VIDEO;
2677 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2678 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2679 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2680 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2681 gCamCapability[mCameraId]->color_arrangement);
2682 mStreamConfigInfo.num_streams++;
2683 }
2684
2685 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2686 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002687 m_bIs4KVideo ? 0 :
2688 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002689
2690 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2691 for (pendingRequestIterator i = mPendingRequestsList.begin();
2692 i != mPendingRequestsList.end();) {
2693 i = erasePendingRequest(i);
2694 }
2695 mPendingFrameDropList.clear();
2696 // Initialize/Reset the pending buffers list
2697 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2698 req.mPendingBufferList.clear();
2699 }
2700 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2701
Thierry Strudel3d639192016-09-09 11:52:26 -07002702 mCurJpegMeta.clear();
 2703     //Get min frame duration for this stream configuration
2704 deriveMinFrameDuration();
2705
Chien-Yu Chenee335912017-02-09 17:53:20 -08002706 mFirstPreviewIntentSeen = false;
2707
 2708     // Disable HDR+ if it's enabled.
2709 disableHdrPlusModeLocked();
2710
Thierry Strudel3d639192016-09-09 11:52:26 -07002711 // Update state
2712 mState = CONFIGURED;
2713
2714 pthread_mutex_unlock(&mMutex);
2715
2716 return rc;
2717}
2718
2719/*===========================================================================
2720 * FUNCTION : validateCaptureRequest
2721 *
2722 * DESCRIPTION: validate a capture request from camera service
2723 *
2724 * PARAMETERS :
2725 * @request : request from framework to process
2726 *
2727 * RETURN :
2728 *
2729 *==========================================================================*/
2730int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002731 camera3_capture_request_t *request,
2732 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002733{
2734 ssize_t idx = 0;
2735 const camera3_stream_buffer_t *b;
2736 CameraMetadata meta;
2737
2738 /* Sanity check the request */
2739 if (request == NULL) {
2740 LOGE("NULL capture request");
2741 return BAD_VALUE;
2742 }
2743
2744 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2745 /*settings cannot be null for the first request*/
2746 return BAD_VALUE;
2747 }
2748
2749 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002750 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2751 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002752        LOGE("Request %d: No output buffers provided!",
 2753                frameNumber);
2754 return BAD_VALUE;
2755 }
2756 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
 2757             LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2758 request->num_output_buffers, MAX_NUM_STREAMS);
2759 return BAD_VALUE;
2760 }
2761 if (request->input_buffer != NULL) {
2762 b = request->input_buffer;
2763 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2764 LOGE("Request %d: Buffer %ld: Status not OK!",
2765 frameNumber, (long)idx);
2766 return BAD_VALUE;
2767 }
2768 if (b->release_fence != -1) {
2769 LOGE("Request %d: Buffer %ld: Has a release fence!",
2770 frameNumber, (long)idx);
2771 return BAD_VALUE;
2772 }
2773 if (b->buffer == NULL) {
2774 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2775 frameNumber, (long)idx);
2776 return BAD_VALUE;
2777 }
2778 }
2779
2780 // Validate all buffers
2781 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002782 if (b == NULL) {
2783 return BAD_VALUE;
2784 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002785 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002786 QCamera3ProcessingChannel *channel =
2787 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2788 if (channel == NULL) {
2789 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2790 frameNumber, (long)idx);
2791 return BAD_VALUE;
2792 }
2793 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2794 LOGE("Request %d: Buffer %ld: Status not OK!",
2795 frameNumber, (long)idx);
2796 return BAD_VALUE;
2797 }
2798 if (b->release_fence != -1) {
2799 LOGE("Request %d: Buffer %ld: Has a release fence!",
2800 frameNumber, (long)idx);
2801 return BAD_VALUE;
2802 }
2803 if (b->buffer == NULL) {
2804 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2805 frameNumber, (long)idx);
2806 return BAD_VALUE;
2807 }
2808 if (*(b->buffer) == NULL) {
2809 LOGE("Request %d: Buffer %ld: NULL private handle!",
2810 frameNumber, (long)idx);
2811 return BAD_VALUE;
2812 }
2813 idx++;
2814 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002815 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002816 return NO_ERROR;
2817}
2818
2819/*===========================================================================
2820 * FUNCTION : deriveMinFrameDuration
2821 *
 2822  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2823 * on currently configured streams.
2824 *
2825 * PARAMETERS : NONE
2826 *
2827 * RETURN : NONE
2828 *
2829 *==========================================================================*/
2830void QCamera3HardwareInterface::deriveMinFrameDuration()
2831{
2832 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2833
2834 maxJpegDim = 0;
2835 maxProcessedDim = 0;
2836 maxRawDim = 0;
2837
2838 // Figure out maximum jpeg, processed, and raw dimensions
2839 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2840 it != mStreamInfo.end(); it++) {
2841
2842 // Input stream doesn't have valid stream_type
2843 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2844 continue;
2845
2846 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2847 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2848 if (dimension > maxJpegDim)
2849 maxJpegDim = dimension;
2850 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2851 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2852 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2853 if (dimension > maxRawDim)
2854 maxRawDim = dimension;
2855 } else {
2856 if (dimension > maxProcessedDim)
2857 maxProcessedDim = dimension;
2858 }
2859 }
2860
2861 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2862 MAX_SIZES_CNT);
2863
2864 //Assume all jpeg dimensions are in processed dimensions.
2865 if (maxJpegDim > maxProcessedDim)
2866 maxProcessedDim = maxJpegDim;
 2867     //Find the smallest raw dimension that is greater than or equal to the max processed dimension
2868 if (maxProcessedDim > maxRawDim) {
2869 maxRawDim = INT32_MAX;
2870
2871 for (size_t i = 0; i < count; i++) {
2872 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2873 gCamCapability[mCameraId]->raw_dim[i].height;
2874 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2875 maxRawDim = dimension;
2876 }
2877 }
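    /* Illustrative (hypothetical sizes): with a 12MP JPEG/processed stream and
     * supported raw sizes of 16MP and 12MP, the search above resolves
     * maxRawDim to the 12MP raw entry, so the raw minimum duration below comes
     * from that table entry rather than from the largest raw size. */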
2878
2879 //Find minimum durations for processed, jpeg, and raw
2880 for (size_t i = 0; i < count; i++) {
2881 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2882 gCamCapability[mCameraId]->raw_dim[i].height) {
2883 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2884 break;
2885 }
2886 }
2887 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2888 for (size_t i = 0; i < count; i++) {
2889 if (maxProcessedDim ==
2890 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2891 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2892 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2893 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2894 break;
2895 }
2896 }
2897}
2898
2899/*===========================================================================
2900 * FUNCTION : getMinFrameDuration
2901 *
 2902  * DESCRIPTION: get minimum frame duration based on the minimum frame durations
 2903  *              derived for the current stream configuration and the current request.
 2904  *
 2905  * PARAMETERS : @request: request sent by the framework
 2906  *
 2907  * RETURN : minimum frame duration for a particular request
2908 *
2909 *==========================================================================*/
2910int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2911{
2912 bool hasJpegStream = false;
2913 bool hasRawStream = false;
2914 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2915 const camera3_stream_t *stream = request->output_buffers[i].stream;
2916 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2917 hasJpegStream = true;
2918 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2919 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2920 stream->format == HAL_PIXEL_FORMAT_RAW16)
2921 hasRawStream = true;
2922 }
2923
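    /* A request that contains a BLOB (JPEG) buffer is throttled by all three
     * minimum durations; otherwise only the raw and processed minimums derived
     * in deriveMinFrameDuration() apply. */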
2924 if (!hasJpegStream)
2925 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2926 else
2927 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2928}
2929
2930/*===========================================================================
2931 * FUNCTION : handleBuffersDuringFlushLock
2932 *
2933 * DESCRIPTION: Account for buffers returned from back-end during flush
2934 * This function is executed while mMutex is held by the caller.
2935 *
2936 * PARAMETERS :
2937 * @buffer: image buffer for the callback
2938 *
2939 * RETURN :
2940 *==========================================================================*/
2941void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2942{
2943 bool buffer_found = false;
2944 for (List<PendingBuffersInRequest>::iterator req =
2945 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2946 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2947 for (List<PendingBufferInfo>::iterator i =
2948 req->mPendingBufferList.begin();
2949 i != req->mPendingBufferList.end(); i++) {
2950 if (i->buffer == buffer->buffer) {
2951 mPendingBuffersMap.numPendingBufsAtFlush--;
2952 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2953 buffer->buffer, req->frame_number,
2954 mPendingBuffersMap.numPendingBufsAtFlush);
2955 buffer_found = true;
2956 break;
2957 }
2958 }
2959 if (buffer_found) {
2960 break;
2961 }
2962 }
2963 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2964 //signal the flush()
2965 LOGD("All buffers returned to HAL. Continue flush");
2966 pthread_cond_signal(&mBuffersCond);
2967 }
2968}
2969
Thierry Strudel3d639192016-09-09 11:52:26 -07002970/*===========================================================================
2971 * FUNCTION : handleBatchMetadata
2972 *
2973 * DESCRIPTION: Handles metadata buffer callback in batch mode
2974 *
2975 * PARAMETERS : @metadata_buf: metadata buffer
 2976  *             @free_and_bufdone_meta_buf: if true, do bufDone on the meta buffer
 2977  *                              and free it in this method
2978 *
2979 * RETURN :
2980 *
2981 *==========================================================================*/
2982void QCamera3HardwareInterface::handleBatchMetadata(
2983 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2984{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002985 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002986
2987 if (NULL == metadata_buf) {
2988 LOGE("metadata_buf is NULL");
2989 return;
2990 }
 2991     /* In batch mode, the metadata will contain the frame number and timestamp of
2992 * the last frame in the batch. Eg: a batch containing buffers from request
2993 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
 2994      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2995 * multiple process_capture_results */
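    /* Illustrative example for a batch of 4 in HFR: requests 5,6,7 and 8
     * arrive as one super-buffer whose metadata reports frame number 8 and its
     * timestamp; the code below re-emits metadata for 5,6,7 and 8, spacing the
     * inferred timestamps 1/mHFRVideoFps apart so that frame 8 keeps the
     * reported capture time. */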
2996 metadata_buffer_t *metadata =
2997 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2998 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2999 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3000 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3001 uint32_t frame_number = 0, urgent_frame_number = 0;
3002 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3003 bool invalid_metadata = false;
3004 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3005 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003006 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003007
3008 int32_t *p_frame_number_valid =
3009 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3010 uint32_t *p_frame_number =
3011 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3012 int64_t *p_capture_time =
3013 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3014 int32_t *p_urgent_frame_number_valid =
3015 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3016 uint32_t *p_urgent_frame_number =
3017 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3018
3019 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3020 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3021 (NULL == p_urgent_frame_number)) {
3022 LOGE("Invalid metadata");
3023 invalid_metadata = true;
3024 } else {
3025 frame_number_valid = *p_frame_number_valid;
3026 last_frame_number = *p_frame_number;
3027 last_frame_capture_time = *p_capture_time;
3028 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3029 last_urgent_frame_number = *p_urgent_frame_number;
3030 }
3031
 3032     /* In batch mode, when no video buffers are requested, set_parms are sent
3033 * for every capture_request. The difference between consecutive urgent
3034 * frame numbers and frame numbers should be used to interpolate the
3035 * corresponding frame numbers and time stamps */
3036 pthread_mutex_lock(&mMutex);
3037 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003038 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3039 if(idx < 0) {
3040 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3041 last_urgent_frame_number);
3042 mState = ERROR;
3043 pthread_mutex_unlock(&mMutex);
3044 return;
3045 }
3046 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003047 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3048 first_urgent_frame_number;
3049
3050 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3051 urgent_frame_number_valid,
3052 first_urgent_frame_number, last_urgent_frame_number);
3053 }
3054
3055 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003056 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3057 if(idx < 0) {
3058 LOGE("Invalid frame number received: %d. Irrecoverable error",
3059 last_frame_number);
3060 mState = ERROR;
3061 pthread_mutex_unlock(&mMutex);
3062 return;
3063 }
3064 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003065 frameNumDiff = last_frame_number + 1 -
3066 first_frame_number;
3067 mPendingBatchMap.removeItem(last_frame_number);
3068
3069 LOGD("frm: valid: %d frm_num: %d - %d",
3070 frame_number_valid,
3071 first_frame_number, last_frame_number);
3072
3073 }
3074 pthread_mutex_unlock(&mMutex);
3075
3076 if (urgent_frame_number_valid || frame_number_valid) {
3077 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3078 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3079 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3080 urgentFrameNumDiff, last_urgent_frame_number);
3081 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3082 LOGE("frameNumDiff: %d frameNum: %d",
3083 frameNumDiff, last_frame_number);
3084 }
3085
3086 for (size_t i = 0; i < loopCount; i++) {
3087 /* handleMetadataWithLock is called even for invalid_metadata for
3088 * pipeline depth calculation */
3089 if (!invalid_metadata) {
3090 /* Infer frame number. Batch metadata contains frame number of the
3091 * last frame */
3092 if (urgent_frame_number_valid) {
3093 if (i < urgentFrameNumDiff) {
3094 urgent_frame_number =
3095 first_urgent_frame_number + i;
3096 LOGD("inferred urgent frame_number: %d",
3097 urgent_frame_number);
3098 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3099 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3100 } else {
3101 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3102 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3103 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3104 }
3105 }
3106
3107 /* Infer frame number. Batch metadata contains frame number of the
3108 * last frame */
3109 if (frame_number_valid) {
3110 if (i < frameNumDiff) {
3111 frame_number = first_frame_number + i;
3112 LOGD("inferred frame_number: %d", frame_number);
3113 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3114 CAM_INTF_META_FRAME_NUMBER, frame_number);
3115 } else {
3116 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3117 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3118 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3119 }
3120 }
3121
3122 if (last_frame_capture_time) {
3123 //Infer timestamp
3124 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003125 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003126 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003127 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003128 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3129 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3130 LOGD("batch capture_time: %lld, capture_time: %lld",
3131 last_frame_capture_time, capture_time);
3132 }
3133 }
3134 pthread_mutex_lock(&mMutex);
3135 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003136 false /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003137 (i == 0) /* first metadata in the batch metadata */,
 3138                 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003139 pthread_mutex_unlock(&mMutex);
3140 }
3141
3142 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003143 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003144 mMetadataChannel->bufDone(metadata_buf);
3145 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003146 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003147 }
3148}
3149
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003150void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3151 camera3_error_msg_code_t errorCode)
3152{
3153 camera3_notify_msg_t notify_msg;
3154 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3155 notify_msg.type = CAMERA3_MSG_ERROR;
3156 notify_msg.message.error.error_code = errorCode;
3157 notify_msg.message.error.error_stream = NULL;
3158 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003159 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003160
3161 return;
3162}
Thierry Strudel3d639192016-09-09 11:52:26 -07003163/*===========================================================================
3164 * FUNCTION : handleMetadataWithLock
3165 *
3166 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3167 *
3168 * PARAMETERS : @metadata_buf: metadata buffer
 3169  *              @free_and_bufdone_meta_buf: if true, do bufDone on the meta buffer
 3170  *                                          and free it in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003171 * @firstMetadataInBatch: Boolean to indicate whether this is the
3172 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003173 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3174 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003175 *
3176 * RETURN :
3177 *
3178 *==========================================================================*/
3179void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003180 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003181 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003182{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003183 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003184 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3185 //during flush do not send metadata from this thread
3186 LOGD("not sending metadata during flush or when mState is error");
3187 if (free_and_bufdone_meta_buf) {
3188 mMetadataChannel->bufDone(metadata_buf);
3189 free(metadata_buf);
3190 }
3191 return;
3192 }
3193
3194 //not in flush
3195 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3196 int32_t frame_number_valid, urgent_frame_number_valid;
3197 uint32_t frame_number, urgent_frame_number;
3198 int64_t capture_time;
3199 nsecs_t currentSysTime;
3200
3201 int32_t *p_frame_number_valid =
3202 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3203 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3204 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3205 int32_t *p_urgent_frame_number_valid =
3206 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3207 uint32_t *p_urgent_frame_number =
3208 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3209 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3210 metadata) {
3211 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3212 *p_frame_number_valid, *p_frame_number);
3213 }
3214
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003215 camera_metadata_t *resultMetadata = nullptr;
3216
Thierry Strudel3d639192016-09-09 11:52:26 -07003217 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3218 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3219 LOGE("Invalid metadata");
3220 if (free_and_bufdone_meta_buf) {
3221 mMetadataChannel->bufDone(metadata_buf);
3222 free(metadata_buf);
3223 }
3224 goto done_metadata;
3225 }
3226 frame_number_valid = *p_frame_number_valid;
3227 frame_number = *p_frame_number;
3228 capture_time = *p_capture_time;
3229 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3230 urgent_frame_number = *p_urgent_frame_number;
3231 currentSysTime = systemTime(CLOCK_MONOTONIC);
3232
3233 // Detect if buffers from any requests are overdue
3234 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003235 int64_t timeout;
3236 {
3237 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3238 // If there is a pending HDR+ request, the following requests may be blocked until the
3239 // HDR+ request is done. So allow a longer timeout.
3240 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3241 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3242 }
3243
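        // If this request has been pending longer than the timeout, flag every
        // buffer still outstanding for it via timeoutFrame() so the request can
        // complete instead of stalling the pipeline indefinitely.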
3244 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003245 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003246 assert(missed.stream->priv);
3247 if (missed.stream->priv) {
3248 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3249 assert(ch->mStreams[0]);
3250 if (ch->mStreams[0]) {
3251 LOGE("Cancel missing frame = %d, buffer = %p,"
3252 "stream type = %d, stream format = %d",
3253 req.frame_number, missed.buffer,
3254 ch->mStreams[0]->getMyType(), missed.stream->format);
3255 ch->timeoutFrame(req.frame_number);
3256 }
3257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003258 }
3259 }
3260 }
3261 //Partial result on process_capture_result for timestamp
3262 if (urgent_frame_number_valid) {
3263 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3264 urgent_frame_number, capture_time);
3265
 3266             //Received an urgent Frame Number, handle it
3267 //using partial results
3268 for (pendingRequestIterator i =
3269 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3270 LOGD("Iterator Frame = %d urgent frame = %d",
3271 i->frame_number, urgent_frame_number);
3272
3273 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3274 (i->partial_result_cnt == 0)) {
3275 LOGE("Error: HAL missed urgent metadata for frame number %d",
3276 i->frame_number);
3277 }
3278
3279 if (i->frame_number == urgent_frame_number &&
3280 i->bUrgentReceived == 0) {
3281
3282 camera3_capture_result_t result;
3283 memset(&result, 0, sizeof(camera3_capture_result_t));
3284
3285 i->partial_result_cnt++;
3286 i->bUrgentReceived = 1;
3287 // Extract 3A metadata
3288 result.result =
3289 translateCbUrgentMetadataToResultMetadata(metadata);
3290 // Populate metadata result
3291 result.frame_number = urgent_frame_number;
3292 result.num_output_buffers = 0;
3293 result.output_buffers = NULL;
3294 result.partial_result = i->partial_result_cnt;
3295
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003296 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003297 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003298 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003299 result.partial_result == PARTIAL_RESULT_COUNT);
3300 }
3301
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003302 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003303 LOGD("urgent frame_number = %u, capture_time = %lld",
3304 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003305 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3306 // Instant AEC settled for this frame.
3307 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3308 mInstantAECSettledFrameNumber = urgent_frame_number;
3309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003310 free_camera_metadata((camera_metadata_t *)result.result);
3311 break;
3312 }
3313 }
3314 }
3315
3316 if (!frame_number_valid) {
3317 LOGD("Not a valid normal frame number, used as SOF only");
3318 if (free_and_bufdone_meta_buf) {
3319 mMetadataChannel->bufDone(metadata_buf);
3320 free(metadata_buf);
3321 }
3322 goto done_metadata;
3323 }
3324 LOGH("valid frame_number = %u, capture_time = %lld",
3325 frame_number, capture_time);
3326
Emilian Peev7650c122017-01-19 08:24:33 -08003327 if (metadata->is_depth_data_valid) {
3328 handleDepthDataLocked(metadata->depth_data, frame_number);
3329 }
3330
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003331    // Check whether any stream buffer corresponding to this frame is dropped or not.
 3332    // If dropped, send ERROR_BUFFER for the corresponding stream.
 3333    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3334 for (auto & pendingRequest : mPendingRequestsList) {
3335 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3336 mInstantAECSettledFrameNumber)) {
3337 camera3_notify_msg_t notify_msg = {};
3338 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003339 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003340 QCamera3ProcessingChannel *channel =
3341 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003342 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003343 if (p_cam_frame_drop) {
3344 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003345 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003346 // Got the stream ID for drop frame.
3347 dropFrame = true;
3348 break;
3349 }
3350 }
3351 } else {
 3352                     // This is the instant AEC case.
 3353                     // For instant AEC, drop the stream until AEC is settled.
3354 dropFrame = true;
3355 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003356
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003357 if (dropFrame) {
3358 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3359 if (p_cam_frame_drop) {
3360 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003361 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003362 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003363 } else {
3364 // For instant AEC, inform frame drop and frame number
3365 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3366 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003367 pendingRequest.frame_number, streamID,
3368 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003369 }
3370 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003371 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003373 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003374 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003375 if (p_cam_frame_drop) {
3376 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003377 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003378 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003379 } else {
3380 // For instant AEC, inform frame drop and frame number
3381 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3382 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003383 pendingRequest.frame_number, streamID,
3384 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003385 }
3386 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003388 PendingFrameDrop.stream_ID = streamID;
3389 // Add the Frame drop info to mPendingFrameDropList
3390 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003391 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003392 }
3393 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003395
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003396 for (auto & pendingRequest : mPendingRequestsList) {
3397 // Find the pending request with the frame number.
3398 if (pendingRequest.frame_number == frame_number) {
3399 // Update the sensor timestamp.
3400 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003401
Thierry Strudel3d639192016-09-09 11:52:26 -07003402
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003403 /* Set the timestamp in display metadata so that clients aware of
 3404        private_handle, such as VT, can use these unmodified timestamps.
3405 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003406 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003407
Thierry Strudel3d639192016-09-09 11:52:26 -07003408 // Find channel requiring metadata, meaning internal offline postprocess
3409 // is needed.
3410 //TODO: for now, we don't support two streams requiring metadata at the same time.
3411 // (because we are not making copies, and metadata buffer is not reference counted.
 3412             // (because we are not making copies, and the metadata buffer is not reference counted.)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003413 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3414 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003415 if (iter->need_metadata) {
3416 internalPproc = true;
3417 QCamera3ProcessingChannel *channel =
3418 (QCamera3ProcessingChannel *)iter->stream->priv;
3419 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003420 if(p_is_metabuf_queued != NULL) {
3421 *p_is_metabuf_queued = true;
3422 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003423 break;
3424 }
3425 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003426 for (auto itr = pendingRequest.internalRequestList.begin();
3427 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003428 if (itr->need_metadata) {
3429 internalPproc = true;
3430 QCamera3ProcessingChannel *channel =
3431 (QCamera3ProcessingChannel *)itr->stream->priv;
3432 channel->queueReprocMetadata(metadata_buf);
3433 break;
3434 }
3435 }
3436
Thierry Strudel54dc9782017-02-15 12:12:10 -08003437 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003438 resultMetadata = translateFromHalMetadata(metadata,
3439 pendingRequest.timestamp, pendingRequest.request_id,
3440 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3441 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003442 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003443 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003444 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003445 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003446 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003447 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003448
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003449 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003450
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003451 if (pendingRequest.blob_request) {
3452 //Dump tuning metadata if enabled and available
3453 char prop[PROPERTY_VALUE_MAX];
3454 memset(prop, 0, sizeof(prop));
3455 property_get("persist.camera.dumpmetadata", prop, "0");
3456 int32_t enabled = atoi(prop);
3457 if (enabled && metadata->is_tuning_params_valid) {
3458 dumpMetadataToFile(metadata->tuning_params,
3459 mMetaFrameCount,
3460 enabled,
3461 "Snapshot",
3462 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003463 }
3464 }
3465
3466 if (!internalPproc) {
3467 LOGD("couldn't find need_metadata for this metadata");
3468 // Return metadata buffer
3469 if (free_and_bufdone_meta_buf) {
3470 mMetadataChannel->bufDone(metadata_buf);
3471 free(metadata_buf);
3472 }
3473 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003474
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003475 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 }
3477 }
3478
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003479 // Try to send out shutter callbacks and capture results.
3480 handlePendingResultsWithLock(frame_number, resultMetadata);
3481 return;
3482
Thierry Strudel3d639192016-09-09 11:52:26 -07003483done_metadata:
3484 for (pendingRequestIterator i = mPendingRequestsList.begin();
3485 i != mPendingRequestsList.end() ;i++) {
3486 i->pipeline_depth++;
3487 }
3488 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3489 unblockRequestIfNecessary();
3490}
3491
3492/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003493  * FUNCTION   : handleDepthDataLocked
3494 *
3495 * DESCRIPTION: Handles incoming depth data
3496 *
3497 * PARAMETERS : @depthData : Depth data
3498 * @frameNumber: Frame number of the incoming depth data
3499 *
3500 * RETURN :
3501 *
3502 *==========================================================================*/
3503void QCamera3HardwareInterface::handleDepthDataLocked(
3504 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3505 uint32_t currentFrameNumber;
3506 buffer_handle_t *depthBuffer;
3507
3508 if (nullptr == mDepthChannel) {
3509 LOGE("Depth channel not present!");
3510 return;
3511 }
3512
3513 camera3_stream_buffer_t resultBuffer =
3514 {.acquire_fence = -1,
3515 .release_fence = -1,
3516 .status = CAMERA3_BUFFER_STATUS_OK,
3517 .buffer = nullptr,
3518 .stream = mDepthChannel->getStream()};
3519 camera3_capture_result_t result =
3520 {.result = nullptr,
3521 .num_output_buffers = 1,
3522 .output_buffers = &resultBuffer,
3523 .partial_result = 0,
3524 .frame_number = 0};
3525
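    // Drain queued depth buffers in frame-number order: buffers older than the
    // incoming frame are returned as ERROR_BUFFER, the matching frame is
    // populated with depthData, and anything newer is left queued.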
3526 do {
3527 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3528 if (nullptr == depthBuffer) {
3529 break;
3530 }
3531
3532 result.frame_number = currentFrameNumber;
3533 resultBuffer.buffer = depthBuffer;
3534 if (currentFrameNumber == frameNumber) {
3535 int32_t rc = mDepthChannel->populateDepthData(depthData,
3536 frameNumber);
3537 if (NO_ERROR != rc) {
3538 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3539 } else {
3540 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3541 }
3542 } else if (currentFrameNumber > frameNumber) {
3543 break;
3544 } else {
3545 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3546 {{currentFrameNumber, mDepthChannel->getStream(),
3547 CAMERA3_MSG_ERROR_BUFFER}}};
3548 orchestrateNotify(&notify_msg);
3549
 3550             LOGE("Depth buffer for frame number: %d is missing, "
 3551                     "returning it with error status!", currentFrameNumber);
3552 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3553 }
3554 mDepthChannel->unmapBuffer(currentFrameNumber);
3555
3556 orchestrateResult(&result);
3557 } while (currentFrameNumber < frameNumber);
3558}
3559
3560/*===========================================================================
3561 * FUNCTION : notifyErrorFoPendingDepthData
3562 *
3563 * DESCRIPTION: Returns error for any pending depth buffers
3564 *
3565 * PARAMETERS : depthCh - depth channel that needs to get flushed
3566 *
3567 * RETURN :
3568 *
3569 *==========================================================================*/
3570void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3571 QCamera3DepthChannel *depthCh) {
3572 uint32_t currentFrameNumber;
3573 buffer_handle_t *depthBuffer;
3574
3575 if (nullptr == depthCh) {
3576 return;
3577 }
3578
3579 camera3_notify_msg_t notify_msg =
3580 {.type = CAMERA3_MSG_ERROR,
3581 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3582 camera3_stream_buffer_t resultBuffer =
3583 {.acquire_fence = -1,
3584 .release_fence = -1,
3585 .buffer = nullptr,
3586 .stream = depthCh->getStream(),
3587 .status = CAMERA3_BUFFER_STATUS_ERROR};
3588 camera3_capture_result_t result =
3589 {.result = nullptr,
3590 .frame_number = 0,
3591 .num_output_buffers = 1,
3592 .partial_result = 0,
3593 .output_buffers = &resultBuffer};
3594
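    // Flush path: every depth buffer still mapped in the channel is returned
    // to the framework as an ERROR_BUFFER result, oldest first.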
3595 while (nullptr !=
3596 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3597 depthCh->unmapBuffer(currentFrameNumber);
3598
3599 notify_msg.message.error.frame_number = currentFrameNumber;
3600 orchestrateNotify(&notify_msg);
3601
3602 resultBuffer.buffer = depthBuffer;
3603 result.frame_number = currentFrameNumber;
3604 orchestrateResult(&result);
3605 };
3606}
3607
3608/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003609 * FUNCTION : hdrPlusPerfLock
3610 *
3611 * DESCRIPTION: perf lock for HDR+ using custom intent
3612 *
3613 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3614 *
3615 * RETURN : None
3616 *
3617 *==========================================================================*/
3618void QCamera3HardwareInterface::hdrPlusPerfLock(
3619 mm_camera_super_buf_t *metadata_buf)
3620{
3621 if (NULL == metadata_buf) {
3622 LOGE("metadata_buf is NULL");
3623 return;
3624 }
3625 metadata_buffer_t *metadata =
3626 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3627 int32_t *p_frame_number_valid =
3628 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3629 uint32_t *p_frame_number =
3630 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631
3632 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3633 LOGE("%s: Invalid metadata", __func__);
3634 return;
3635 }
3636
3637 //acquire perf lock for 5 sec after the last HDR frame is captured
3638 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3639 if ((p_frame_number != NULL) &&
3640 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003641 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003642 }
3643 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003644}
3645
3646/*===========================================================================
3647 * FUNCTION : handleInputBufferWithLock
3648 *
3649 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3650 *
3651 * PARAMETERS : @frame_number: frame number of the input buffer
3652 *
3653 * RETURN :
3654 *
3655 *==========================================================================*/
3656void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3657{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003658 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003659 pendingRequestIterator i = mPendingRequestsList.begin();
3660 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3661 i++;
3662 }
3663 if (i != mPendingRequestsList.end() && i->input_buffer) {
3664 //found the right request
3665 if (!i->shutter_notified) {
3666 CameraMetadata settings;
3667 camera3_notify_msg_t notify_msg;
3668 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3669 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3670 if(i->settings) {
3671 settings = i->settings;
3672 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3673 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3674 } else {
3675 LOGE("No timestamp in input settings! Using current one.");
3676 }
3677 } else {
3678 LOGE("Input settings missing!");
3679 }
3680
3681 notify_msg.type = CAMERA3_MSG_SHUTTER;
3682 notify_msg.message.shutter.frame_number = frame_number;
3683 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003684 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 i->shutter_notified = true;
3686 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3687 i->frame_number, notify_msg.message.shutter.timestamp);
3688 }
3689
3690 if (i->input_buffer->release_fence != -1) {
3691 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3692 close(i->input_buffer->release_fence);
3693 if (rc != OK) {
3694 LOGE("input buffer sync wait failed %d", rc);
3695 }
3696 }
3697
3698 camera3_capture_result result;
3699 memset(&result, 0, sizeof(camera3_capture_result));
3700 result.frame_number = frame_number;
3701 result.result = i->settings;
3702 result.input_buffer = i->input_buffer;
3703 result.partial_result = PARTIAL_RESULT_COUNT;
3704
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003705 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 LOGD("Input request metadata and input buffer frame_number = %u",
3707 i->frame_number);
3708 i = erasePendingRequest(i);
3709 } else {
3710 LOGE("Could not find input request for frame number %d", frame_number);
3711 }
3712}
3713
3714/*===========================================================================
3715 * FUNCTION : handleBufferWithLock
3716 *
3717 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3718 *
3719 * PARAMETERS : @buffer: image buffer for the callback
3720 * @frame_number: frame number of the image buffer
3721 *
3722 * RETURN :
3723 *
3724 *==========================================================================*/
3725void QCamera3HardwareInterface::handleBufferWithLock(
3726 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3727{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003728 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003729
3730 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3731 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3732 }
3733
Thierry Strudel3d639192016-09-09 11:52:26 -07003734 /* Nothing to be done during error state */
3735 if ((ERROR == mState) || (DEINIT == mState)) {
3736 return;
3737 }
3738 if (mFlushPerf) {
3739 handleBuffersDuringFlushLock(buffer);
3740 return;
3741 }
3742 //not in flush
3743 // If the frame number doesn't exist in the pending request list,
3744 // directly send the buffer to the frameworks, and update pending buffers map
3745 // Otherwise, book-keep the buffer.
3746 pendingRequestIterator i = mPendingRequestsList.begin();
3747 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3748 i++;
3749 }
3750 if (i == mPendingRequestsList.end()) {
3751 // Verify all pending requests frame_numbers are greater
3752 for (pendingRequestIterator j = mPendingRequestsList.begin();
3753 j != mPendingRequestsList.end(); j++) {
3754 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3755 LOGW("Error: pending live frame number %d is smaller than %d",
3756 j->frame_number, frame_number);
3757 }
3758 }
3759 camera3_capture_result_t result;
3760 memset(&result, 0, sizeof(camera3_capture_result_t));
3761 result.result = NULL;
3762 result.frame_number = frame_number;
3763 result.num_output_buffers = 1;
3764 result.partial_result = 0;
3765 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3766 m != mPendingFrameDropList.end(); m++) {
3767 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3768 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3769 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3770 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3771 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3772 frame_number, streamID);
3773 m = mPendingFrameDropList.erase(m);
3774 break;
3775 }
3776 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003777 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003778 result.output_buffers = buffer;
3779 LOGH("result frame_number = %d, buffer = %p",
3780 frame_number, buffer->buffer);
3781
3782 mPendingBuffersMap.removeBuf(buffer->buffer);
3783
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003784 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003785 } else {
3786 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 if (i->input_buffer->release_fence != -1) {
3788 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3789 close(i->input_buffer->release_fence);
3790 if (rc != OK) {
3791 LOGE("input buffer sync wait failed %d", rc);
3792 }
3793 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003795
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 // Put buffer into the pending request
3797 for (auto &requestedBuffer : i->buffers) {
3798 if (requestedBuffer.stream == buffer->stream) {
3799 if (requestedBuffer.buffer != nullptr) {
3800 LOGE("Error: buffer is already set");
3801 } else {
3802 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3803 sizeof(camera3_stream_buffer_t));
3804 *(requestedBuffer.buffer) = *buffer;
3805 LOGH("cache buffer %p at result frame_number %u",
3806 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003807 }
3808 }
3809 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810
3811 if (i->input_buffer) {
3812 // For a reprocessing request, try to send out shutter callback and result metadata.
3813 handlePendingResultsWithLock(frame_number, nullptr);
3814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003815 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003816
3817 if (mPreviewStarted == false) {
3818 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3819 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3820 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3821 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3822 mPreviewStarted = true;
3823
3824 // Set power hint for preview
3825 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3826 }
3827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003828}
3829
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3831 const camera_metadata_t *resultMetadata)
3832{
3833 // Find the pending request for this result metadata.
3834 auto requestIter = mPendingRequestsList.begin();
3835 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3836 requestIter++;
3837 }
3838
3839 if (requestIter == mPendingRequestsList.end()) {
3840 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3841 return;
3842 }
3843
3844 // Update the result metadata
3845 requestIter->resultMetadata = resultMetadata;
3846
3847 // Check what type of request this is.
3848 bool liveRequest = false;
3849 if (requestIter->hdrplus) {
3850 // HDR+ request doesn't have partial results.
3851 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3852 } else if (requestIter->input_buffer != nullptr) {
3853 // Reprocessing request result is the same as settings.
3854 requestIter->resultMetadata = requestIter->settings;
3855 // Reprocessing request doesn't have partial results.
3856 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3857 } else {
3858 liveRequest = true;
3859 requestIter->partial_result_cnt++;
3860 mPendingLiveRequest--;
3861
3862 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003863 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3864 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3866 }
3867 }
3868
3869 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3870 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3871 bool readyToSend = true;
3872
3873 // Iterate through the pending requests to send out shutter callbacks and results that are
3874 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3875 // live requests that don't have result metadata yet.
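    // Results are delivered strictly in frame-number order: a ready result is
    // held back while any earlier request still lacks metadata, and earlier
    // live requests overtaken by this metadata are completed as ERROR_RESULT
    // below.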
3876 auto iter = mPendingRequestsList.begin();
3877 while (iter != mPendingRequestsList.end()) {
3878 // Check if current pending request is ready. If it's not ready, the following pending
3879 // requests are also not ready.
3880 if (readyToSend && iter->resultMetadata == nullptr) {
3881 readyToSend = false;
3882 }
3883
3884 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3885
3886 std::vector<camera3_stream_buffer_t> outputBuffers;
3887
3888 camera3_capture_result_t result = {};
3889 result.frame_number = iter->frame_number;
3890 result.result = iter->resultMetadata;
3891 result.partial_result = iter->partial_result_cnt;
3892
3893 // If this pending buffer has result metadata, we may be able to send out shutter callback
3894 // and result metadata.
3895 if (iter->resultMetadata != nullptr) {
3896 if (!readyToSend) {
3897 // If any of the previous pending request is not ready, this pending request is
3898 // also not ready to send in order to keep shutter callbacks and result metadata
3899 // in order.
3900 iter++;
3901 continue;
3902 }
3903
3904 // Invoke shutter callback if not yet.
3905 if (!iter->shutter_notified) {
3906 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3907
3908 // Find the timestamp in HDR+ result metadata
3909 camera_metadata_ro_entry_t entry;
3910 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3911 ANDROID_SENSOR_TIMESTAMP, &entry);
3912 if (res != OK) {
3913 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3914 __FUNCTION__, iter->frame_number, strerror(-res), res);
3915 } else {
3916 timestamp = entry.data.i64[0];
3917 }
3918
3919 camera3_notify_msg_t notify_msg = {};
3920 notify_msg.type = CAMERA3_MSG_SHUTTER;
3921 notify_msg.message.shutter.frame_number = iter->frame_number;
3922 notify_msg.message.shutter.timestamp = timestamp;
3923 orchestrateNotify(&notify_msg);
3924 iter->shutter_notified = true;
3925 }
3926
3927 result.input_buffer = iter->input_buffer;
3928
3929 // Prepare output buffer array
3930 for (auto bufferInfoIter = iter->buffers.begin();
3931 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3932 if (bufferInfoIter->buffer != nullptr) {
3933
3934 QCamera3Channel *channel =
3935 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3936 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3937
3938 // Check if this buffer is a dropped frame.
3939 auto frameDropIter = mPendingFrameDropList.begin();
3940 while (frameDropIter != mPendingFrameDropList.end()) {
3941 if((frameDropIter->stream_ID == streamID) &&
3942 (frameDropIter->frame_number == frameNumber)) {
3943 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3944 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3945 streamID);
3946 mPendingFrameDropList.erase(frameDropIter);
3947 break;
3948 } else {
3949 frameDropIter++;
3950 }
3951 }
3952
3953 // Check buffer error status
3954 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3955 bufferInfoIter->buffer->buffer);
3956 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3957
3958 outputBuffers.push_back(*(bufferInfoIter->buffer));
3959 free(bufferInfoIter->buffer);
3960 bufferInfoIter->buffer = NULL;
3961 }
3962 }
3963
3964 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3965 result.num_output_buffers = outputBuffers.size();
3966 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3967 // If the result metadata belongs to a live request, notify errors for previous pending
3968 // live requests.
3969 mPendingLiveRequest--;
3970
3971 CameraMetadata dummyMetadata;
3972 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3973 result.result = dummyMetadata.release();
3974
3975 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3976 } else {
3977 iter++;
3978 continue;
3979 }
3980
3981 orchestrateResult(&result);
3982
3983 // For reprocessing, result metadata is the same as settings so do not free it here to
3984 // avoid double free.
3985 if (result.result != iter->settings) {
3986 free_camera_metadata((camera_metadata_t *)result.result);
3987 }
3988 iter->resultMetadata = nullptr;
3989 iter = erasePendingRequest(iter);
3990 }
3991
3992 if (liveRequest) {
3993 for (auto &iter : mPendingRequestsList) {
3994 // Increment pipeline depth for the following pending requests.
3995 if (iter.frame_number > frameNumber) {
3996 iter.pipeline_depth++;
3997 }
3998 }
3999 }
4000
4001 unblockRequestIfNecessary();
4002}
4003
Thierry Strudel3d639192016-09-09 11:52:26 -07004004/*===========================================================================
4005 * FUNCTION : unblockRequestIfNecessary
4006 *
4007 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4008 * that mMutex is held when this function is called.
4009 *
4010 * PARAMETERS :
4011 *
4012 * RETURN :
4013 *
4014 *==========================================================================*/
4015void QCamera3HardwareInterface::unblockRequestIfNecessary()
4016{
4017 // Unblock process_capture_request
4018 pthread_cond_signal(&mRequestCond);
4019}
4020
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004021/*===========================================================================
4022 * FUNCTION : isHdrSnapshotRequest
4023 *
4024 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4025 *
4026 * PARAMETERS : camera3 request structure
4027 *
4028 * RETURN : boolean decision variable
4029 *
4030 *==========================================================================*/
4031bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4032{
4033 if (request == NULL) {
4034 LOGE("Invalid request handle");
4035 assert(0);
4036 return false;
4037 }
4038
4039 if (!mForceHdrSnapshot) {
4040 CameraMetadata frame_settings;
4041 frame_settings = request->settings;
4042
4043 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4044 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4045 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4046 return false;
4047 }
4048 } else {
4049 return false;
4050 }
4051
4052 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4053 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4054 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4055 return false;
4056 }
4057 } else {
4058 return false;
4059 }
4060 }
4061
4062 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4063 if (request->output_buffers[i].stream->format
4064 == HAL_PIXEL_FORMAT_BLOB) {
4065 return true;
4066 }
4067 }
4068
4069 return false;
4070}
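// In short: a request is treated as an HDR snapshot either when the force
// property (mForceHdrSnapshot) is set, or when the framework asks for
// ANDROID_CONTROL_MODE_USE_SCENE_MODE with ANDROID_CONTROL_SCENE_MODE_HDR;
// in both cases the request must also carry at least one BLOB (JPEG) output
// buffer.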
4071/*===========================================================================
4072 * FUNCTION : orchestrateRequest
4073 *
4074 * DESCRIPTION: Orchestrates a capture request from camera service
4075 *
4076 * PARAMETERS :
4077 * @request : request from framework to process
4078 *
4079 * RETURN : Error status codes
4080 *
4081 *==========================================================================*/
4082int32_t QCamera3HardwareInterface::orchestrateRequest(
4083 camera3_capture_request_t *request)
4084{
4085
4086 uint32_t originalFrameNumber = request->frame_number;
4087 uint32_t originalOutputCount = request->num_output_buffers;
4088 const camera_metadata_t *original_settings = request->settings;
4089 List<InternalRequest> internallyRequestedStreams;
4090 List<InternalRequest> emptyInternalList;
4091
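    /* HDR snapshot orchestration (summary of the branch below): a single
     * framework request is expanded into a bracketed sequence of internal
     * requests tracked via _orchestrationDb:
     *   1. a metering-only settling capture with AE locked and
     *      GB_HDR_HALF_STEP_EV exposure compensation applied (the "-2x"
     *      bracket in the comments below),
     *   2. the framework-visible capture on the original output buffers under
     *      the same settings; this is the only frame that maps back to the
     *      framework frame number,
     *   3. a settling capture plus a full capture with metadata at 0x
     *      compensation,
     *   4. a settling capture plus a full capture with metadata at the
     *      GB_HDR_2X_STEP_EV ("+2x") compensation,
     * after which the internal stream list is cleared and the original
     * settings pointer is restored. Results and notifies for internally
     * numbered frames are dropped in orchestrateResult()/orchestrateNotify().
     */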
4092 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4093 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4094 uint32_t internalFrameNumber;
4095 CameraMetadata modified_meta;
4096
4097
4098 /* Add Blob channel to list of internally requested streams */
4099 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4100 if (request->output_buffers[i].stream->format
4101 == HAL_PIXEL_FORMAT_BLOB) {
4102 InternalRequest streamRequested;
4103 streamRequested.meteringOnly = 1;
4104 streamRequested.need_metadata = 0;
4105 streamRequested.stream = request->output_buffers[i].stream;
4106 internallyRequestedStreams.push_back(streamRequested);
4107 }
4108 }
4109 request->num_output_buffers = 0;
4110 auto itr = internallyRequestedStreams.begin();
4111
4112 /* Modify setting to set compensation */
4113 modified_meta = request->settings;
4114 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4115 uint8_t aeLock = 1;
4116 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4117 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4118 camera_metadata_t *modified_settings = modified_meta.release();
4119 request->settings = modified_settings;
4120
4121 /* Capture Settling & -2x frame */
4122 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4123 request->frame_number = internalFrameNumber;
4124 processCaptureRequest(request, internallyRequestedStreams);
4125
4126 request->num_output_buffers = originalOutputCount;
4127 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4128 request->frame_number = internalFrameNumber;
4129 processCaptureRequest(request, emptyInternalList);
4130 request->num_output_buffers = 0;
4131
4132 modified_meta = modified_settings;
4133 expCompensation = 0;
4134 aeLock = 1;
4135 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4136 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4137 modified_settings = modified_meta.release();
4138 request->settings = modified_settings;
4139
4140 /* Capture Settling & 0X frame */
4141
4142 itr = internallyRequestedStreams.begin();
4143 if (itr == internallyRequestedStreams.end()) {
4144 LOGE("Error Internally Requested Stream list is empty");
4145 assert(0);
4146 } else {
4147 itr->need_metadata = 0;
4148 itr->meteringOnly = 1;
4149 }
4150
4151 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4152 request->frame_number = internalFrameNumber;
4153 processCaptureRequest(request, internallyRequestedStreams);
4154
4155 itr = internallyRequestedStreams.begin();
4156 if (itr == internallyRequestedStreams.end()) {
4157 ALOGE("Error Internally Requested Stream list is empty");
4158 assert(0);
4159 } else {
4160 itr->need_metadata = 1;
4161 itr->meteringOnly = 0;
4162 }
4163
4164 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4165 request->frame_number = internalFrameNumber;
4166 processCaptureRequest(request, internallyRequestedStreams);
4167
4168 /* Capture 2X frame*/
4169 modified_meta = modified_settings;
4170 expCompensation = GB_HDR_2X_STEP_EV;
4171 aeLock = 1;
4172 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4173 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4174 modified_settings = modified_meta.release();
4175 request->settings = modified_settings;
4176
4177 itr = internallyRequestedStreams.begin();
4178 if (itr == internallyRequestedStreams.end()) {
4179 ALOGE("Error Internally Requested Stream list is empty");
4180 assert(0);
4181 } else {
4182 itr->need_metadata = 0;
4183 itr->meteringOnly = 1;
4184 }
4185 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4186 request->frame_number = internalFrameNumber;
4187 processCaptureRequest(request, internallyRequestedStreams);
4188
4189 itr = internallyRequestedStreams.begin();
4190 if (itr == internallyRequestedStreams.end()) {
4191 ALOGE("Error Internally Requested Stream list is empty");
4192 assert(0);
4193 } else {
4194 itr->need_metadata = 1;
4195 itr->meteringOnly = 0;
4196 }
4197
4198 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4199 request->frame_number = internalFrameNumber;
4200 processCaptureRequest(request, internallyRequestedStreams);
4201
4202
4203 /* Capture 2X on original streaming config*/
4204 internallyRequestedStreams.clear();
4205
4206 /* Restore original settings pointer */
4207 request->settings = original_settings;
4208 } else {
4209 uint32_t internalFrameNumber;
4210 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4211 request->frame_number = internalFrameNumber;
4212 return processCaptureRequest(request, internallyRequestedStreams);
4213 }
4214
4215 return NO_ERROR;
4216}
4217
4218/*===========================================================================
4219 * FUNCTION : orchestrateResult
4220 *
4221 * DESCRIPTION: Orchestrates a capture result to camera service
4222 *
4223 * PARAMETERS :
4224 *   @result : capture result to be sent to camera service
4225 *
4226 * RETURN :
4227 *
4228 *==========================================================================*/
4229void QCamera3HardwareInterface::orchestrateResult(
4230 camera3_capture_result_t *result)
4231{
4232 uint32_t frameworkFrameNumber;
4233 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4234 frameworkFrameNumber);
4235 if (rc != NO_ERROR) {
4236 LOGE("Cannot find translated frameworkFrameNumber");
4237 assert(0);
4238 } else {
4239 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004240 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004241 } else {
4242 result->frame_number = frameworkFrameNumber;
4243 mCallbackOps->process_capture_result(mCallbackOps, result);
4244 }
4245 }
4246}
4247
4248/*===========================================================================
4249 * FUNCTION : orchestrateNotify
4250 *
4251 * DESCRIPTION: Orchestrates a notify to camera service
4252 *
4253 * PARAMETERS :
4254 *   @notify_msg : notify message to be sent to camera service
4255 *
4256 * RETURN :
4257 *
4258 *==========================================================================*/
4259void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4260{
4261 uint32_t frameworkFrameNumber;
4262 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004263 int32_t rc = NO_ERROR;
4264
4265 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004266 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004267
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004268 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004269 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4270 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4271 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004272 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004273 LOGE("Cannot find translated frameworkFrameNumber");
4274 assert(0);
4275 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004276 }
4277 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004278
4279 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4280 LOGD("Internal Request drop the notifyCb");
4281 } else {
4282 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4283 mCallbackOps->notify(mCallbackOps, notify_msg);
4284 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004285}
4286
4287/*===========================================================================
4288 * FUNCTION : FrameNumberRegistry
4289 *
4290 * DESCRIPTION: Constructor
4291 *
4292 * PARAMETERS :
4293 *
4294 * RETURN :
4295 *
4296 *==========================================================================*/
4297FrameNumberRegistry::FrameNumberRegistry()
4298{
4299 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4300}
4301
4302/*===========================================================================
4303 * FUNCTION : ~FrameNumberRegistry
4304 *
4305 * DESCRIPTION: Destructor
4306 *
4307 * PARAMETERS :
4308 *
4309 * RETURN :
4310 *
4311 *==========================================================================*/
4312FrameNumberRegistry::~FrameNumberRegistry()
4313{
4314}
4315
4316/*===========================================================================
4317 * FUNCTION : PurgeOldEntriesLocked
4318 *
4319 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4320 *
4321 * PARAMETERS :
4322 *
4323 * RETURN : NONE
4324 *
4325 *==========================================================================*/
4326void FrameNumberRegistry::purgeOldEntriesLocked()
4327{
4328 while (_register.begin() != _register.end()) {
4329 auto itr = _register.begin();
4330 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4331 _register.erase(itr);
4332 } else {
4333 return;
4334 }
4335 }
4336}
4337
4338/*===========================================================================
4339 * FUNCTION : allocStoreInternalFrameNumber
4340 *
4341 * DESCRIPTION: Method to note down a framework request and associate a new
4342 *              internal request number with it
4343 *
4344 * PARAMETERS :
4345 * @fFrameNumber: Identifier given by framework
4346 * @internalFN : Output parameter which will have the newly generated internal
4347 * entry
4348 *
4349 * RETURN : Error code
4350 *
4351 *==========================================================================*/
4352int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4353 uint32_t &internalFrameNumber)
4354{
4355 Mutex::Autolock lock(mRegistryLock);
4356 internalFrameNumber = _nextFreeInternalNumber++;
4357 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4358 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4359 purgeOldEntriesLocked();
4360 return NO_ERROR;
4361}
4362
4363/*===========================================================================
4364 * FUNCTION : generateStoreInternalFrameNumber
4365 *
4366 * DESCRIPTION: Method to associate a new internal request number independent
4367 *              of any association with framework requests
4368 *
4369 * PARAMETERS :
4370 *   @internalFrame#: Output parameter which will have the newly generated internal
4371 *                    frame number
4372 *
4373 * RETURN : Error code
4374 *
4375 *==========================================================================*/
4376int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4377{
4378 Mutex::Autolock lock(mRegistryLock);
4379 internalFrameNumber = _nextFreeInternalNumber++;
4380 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4381 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4382 purgeOldEntriesLocked();
4383 return NO_ERROR;
4384}
4385
4386/*===========================================================================
4387 * FUNCTION : getFrameworkFrameNumber
4388 *
4389 * DESCRIPTION: Method to query the framework framenumber given an internal #
4390 *
4391 * PARAMETERS :
4392 * @internalFrame#: Internal reference
4393 * @frameworkframenumber: Output parameter holding framework frame entry
4394 *
4395 * RETURN : Error code
4396 *
4397 *==========================================================================*/
4398int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4399 uint32_t &frameworkFrameNumber)
4400{
4401 Mutex::Autolock lock(mRegistryLock);
4402 auto itr = _register.find(internalFrameNumber);
4403 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004404 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004405 return -ENOENT;
4406 }
4407
4408 frameworkFrameNumber = itr->second;
4409 purgeOldEntriesLocked();
4410 return NO_ERROR;
4411}
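/* Typical FrameNumberRegistry usage in this file (illustrative only, with
 * made-up variable names):
 *
 *     uint32_t internalFN;
 *     // framework-visible request: remember the framework <-> internal mapping
 *     _orchestrationDb.allocStoreInternalFrameNumber(fwkFrameNumber, internalFN);
 *     // internal-only request: mapped to EMPTY_FRAMEWORK_FRAME_NUMBER
 *     _orchestrationDb.generateStoreInternalFrameNumber(internalFN);
 *     ...
 *     // at result/notify time, translate back (-ENOENT if unknown or purged)
 *     uint32_t fwkFN;
 *     _orchestrationDb.getFrameworkFrameNumber(internalFN, fwkFN);
 *
 * Entries older than FRAME_REGISTER_LRU_SIZE internal numbers are evicted on
 * every insert/lookup by purgeOldEntriesLocked(). */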
Thierry Strudel3d639192016-09-09 11:52:26 -07004412
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004413status_t QCamera3HardwareInterface::fillPbStreamConfig(
4414 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4415 QCamera3Channel *channel, uint32_t streamIndex) {
4416 if (config == nullptr) {
4417 LOGE("%s: config is null", __FUNCTION__);
4418 return BAD_VALUE;
4419 }
4420
4421 if (channel == nullptr) {
4422 LOGE("%s: channel is null", __FUNCTION__);
4423 return BAD_VALUE;
4424 }
4425
4426 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4427 if (stream == nullptr) {
4428 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4429 return NAME_NOT_FOUND;
4430 }
4431
4432 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4433 if (streamInfo == nullptr) {
4434 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4435 return NAME_NOT_FOUND;
4436 }
4437
4438 config->id = pbStreamId;
4439 config->image.width = streamInfo->dim.width;
4440 config->image.height = streamInfo->dim.height;
4441 config->image.padding = 0;
4442 config->image.format = pbStreamFormat;
4443
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004444 uint32_t totalPlaneSize = 0;
4445
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004446 // Fill plane information.
4447 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4448 pbcamera::PlaneConfiguration plane;
4449 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4450 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4451 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004452
4453 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004454 }
4455
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004456 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004457 return OK;
4458}
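// The padding computed above is the slack between the backend frame length
// and the sum of the per-plane sizes. Illustration with hypothetical numbers:
// a 1920x1080 two-plane stream with stride 1920 and scanlines 1088 (Y) and
// 544 (UV) gives totalPlaneSize = 1920*1088 + 1920*544 = 3133440 bytes, so
// config->image.padding = frame_len - 3133440.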
4459
Thierry Strudel3d639192016-09-09 11:52:26 -07004460/*===========================================================================
4461 * FUNCTION : processCaptureRequest
4462 *
4463 * DESCRIPTION: process a capture request from camera service
4464 *
4465 * PARAMETERS :
4466 * @request : request from framework to process
4467 *
4468 * RETURN :
4469 *
4470 *==========================================================================*/
4471int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004472 camera3_capture_request_t *request,
4473 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004474{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004475 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004476 int rc = NO_ERROR;
4477 int32_t request_id;
4478 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004479 bool isVidBufRequested = false;
4480 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004481 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004482
4483 pthread_mutex_lock(&mMutex);
4484
4485 // Validate current state
4486 switch (mState) {
4487 case CONFIGURED:
4488 case STARTED:
4489 /* valid state */
4490 break;
4491
4492 case ERROR:
4493 pthread_mutex_unlock(&mMutex);
4494 handleCameraDeviceError();
4495 return -ENODEV;
4496
4497 default:
4498 LOGE("Invalid state %d", mState);
4499 pthread_mutex_unlock(&mMutex);
4500 return -ENODEV;
4501 }
4502
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004503 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004504 if (rc != NO_ERROR) {
4505 LOGE("incoming request is not valid");
4506 pthread_mutex_unlock(&mMutex);
4507 return rc;
4508 }
4509
4510 meta = request->settings;
4511
4512 // For first capture request, send capture intent, and
4513 // stream on all streams
4514 if (mState == CONFIGURED) {
4515 // send an unconfigure to the backend so that the isp
4516 // resources are deallocated
4517 if (!mFirstConfiguration) {
4518 cam_stream_size_info_t stream_config_info;
4519 int32_t hal_version = CAM_HAL_V3;
4520 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4521 stream_config_info.buffer_info.min_buffers =
4522 MIN_INFLIGHT_REQUESTS;
4523 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004524 m_bIs4KVideo ? 0 :
4525 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004526 clear_metadata_buffer(mParameters);
4527 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4528 CAM_INTF_PARM_HAL_VERSION, hal_version);
4529 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4530 CAM_INTF_META_STREAM_INFO, stream_config_info);
4531 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4532 mParameters);
4533 if (rc < 0) {
4534 LOGE("set_parms for unconfigure failed");
4535 pthread_mutex_unlock(&mMutex);
4536 return rc;
4537 }
4538 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004539 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004540 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004541 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004542 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004543 property_get("persist.camera.is_type", is_type_value, "4");
4544 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4545 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4546 property_get("persist.camera.is_type_preview", is_type_value, "4");
4547 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4548 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004549
4550 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4551 int32_t hal_version = CAM_HAL_V3;
4552 uint8_t captureIntent =
4553 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4554 mCaptureIntent = captureIntent;
4555 clear_metadata_buffer(mParameters);
4556 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4557 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4558 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004559 if (mFirstConfiguration) {
4560 // configure instant AEC
4561 // Instant AEC is a session based parameter and it is needed only
4562 // once per complete session after open camera.
4563 // i.e. This is set only once for the first capture request, after open camera.
4564 setInstantAEC(meta);
4565 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004566 uint8_t fwkVideoStabMode=0;
4567 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4568 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4569 }
4570
4571        // Turn EIS on for video/preview only if the EIS setprop is enabled
4572        // and the first capture request has video stabilization enabled
4573 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4574 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 int32_t vsMode;
4576 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4578 rc = BAD_VALUE;
4579 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004580 LOGD("setEis %d", setEis);
4581 bool eis3Supported = false;
4582 size_t count = IS_TYPE_MAX;
4583 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4584 for (size_t i = 0; i < count; i++) {
4585 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4586 eis3Supported = true;
4587 break;
4588 }
4589 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004590
4591 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004592 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004593 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4594 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004595 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4596 is_type = isTypePreview;
4597 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4598 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4599 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004600 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004601 } else {
4602 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004603 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004604 } else {
4605 is_type = IS_TYPE_NONE;
4606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004607 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004608 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004609 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4610 }
4611 }
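        // Net effect of the loop above when setEis is true: preview streams
        // use isTypePreview, video streams use isTypeVideo (falling back to
        // IS_TYPE_EIS_2_0 when EIS 3.0 is requested but not in the supported
        // IS types), and every other stream gets IS_TYPE_NONE. When setEis is
        // false, all streams get IS_TYPE_NONE.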
4612
4613 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4614 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4615
Thierry Strudel54dc9782017-02-15 12:12:10 -08004616 //Disable tintless only if the property is set to 0
4617 memset(prop, 0, sizeof(prop));
4618 property_get("persist.camera.tintless.enable", prop, "1");
4619 int32_t tintless_value = atoi(prop);
4620
Thierry Strudel3d639192016-09-09 11:52:26 -07004621 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4622 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004623
Thierry Strudel3d639192016-09-09 11:52:26 -07004624 //Disable CDS for HFR mode or if DIS/EIS is on.
4625        //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4626 //after every configure_stream
4627 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4628 (m_bIsVideo)) {
4629 int32_t cds = CAM_CDS_MODE_OFF;
4630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4631 CAM_INTF_PARM_CDS_MODE, cds))
4632 LOGE("Failed to disable CDS for HFR mode");
4633
4634 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004635
4636 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4637 uint8_t* use_av_timer = NULL;
4638
4639 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004640 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004641 use_av_timer = &m_debug_avtimer;
4642 }
4643 else{
4644 use_av_timer =
4645 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004646 if (use_av_timer) {
4647 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4648 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004649 }
4650
4651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4652 rc = BAD_VALUE;
4653 }
4654 }
4655
Thierry Strudel3d639192016-09-09 11:52:26 -07004656 setMobicat();
4657
4658 /* Set fps and hfr mode while sending meta stream info so that sensor
4659 * can configure appropriate streaming mode */
4660 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004661 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4662 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004663 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4664 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004665 if (rc == NO_ERROR) {
4666 int32_t max_fps =
4667 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004668 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004669 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4670 }
4671 /* For HFR, more buffers are dequeued upfront to improve the performance */
4672 if (mBatchSize) {
4673 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4674 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4675 }
4676 }
4677 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004678 LOGE("setHalFpsRange failed");
4679 }
4680 }
4681 if (meta.exists(ANDROID_CONTROL_MODE)) {
4682 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4683 rc = extractSceneMode(meta, metaMode, mParameters);
4684 if (rc != NO_ERROR) {
4685 LOGE("extractSceneMode failed");
4686 }
4687 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004688 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004689
Thierry Strudel04e026f2016-10-10 11:27:36 -07004690 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4691 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4692 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4693 rc = setVideoHdrMode(mParameters, vhdr);
4694 if (rc != NO_ERROR) {
4695 LOGE("setVideoHDR is failed");
4696 }
4697 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004698
Thierry Strudel3d639192016-09-09 11:52:26 -07004699 //TODO: validate the arguments, HSV scenemode should have only the
4700 //advertised fps ranges
4701
4702 /*set the capture intent, hal version, tintless, stream info,
4703         *and DIS enable parameters to the backend*/
4704 LOGD("set_parms META_STREAM_INFO " );
4705 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004706 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4707 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004708 mStreamConfigInfo.type[i],
4709 mStreamConfigInfo.stream_sizes[i].width,
4710 mStreamConfigInfo.stream_sizes[i].height,
4711 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004712 mStreamConfigInfo.format[i],
4713 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004714 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004715
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4717 mParameters);
4718 if (rc < 0) {
4719 LOGE("set_parms failed for hal version, stream info");
4720 }
4721
Chien-Yu Chenee335912017-02-09 17:53:20 -08004722 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4723 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004724 if (rc != NO_ERROR) {
4725 LOGE("Failed to get sensor output size");
4726 pthread_mutex_unlock(&mMutex);
4727 goto error_exit;
4728 }
4729
4730 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4731 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004732 mSensorModeInfo.active_array_size.width,
4733 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004734
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004735 if (gHdrPlusClient != nullptr) {
4736 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4737 if (rc != OK) {
4738 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4739 mCameraId, mSensorModeInfo.op_pixel_clk);
4740 pthread_mutex_unlock(&mMutex);
4741 goto error_exit;
4742 }
4743 }
4744
Thierry Strudel3d639192016-09-09 11:52:26 -07004745 /* Set batchmode before initializing channel. Since registerBuffer
4746 * internally initializes some of the channels, better set batchmode
4747 * even before first register buffer */
4748 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4749 it != mStreamInfo.end(); it++) {
4750 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4751 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4752 && mBatchSize) {
4753 rc = channel->setBatchSize(mBatchSize);
4754 //Disable per frame map unmap for HFR/batchmode case
4755 rc |= channel->setPerFrameMapUnmap(false);
4756 if (NO_ERROR != rc) {
4757 LOGE("Channel init failed %d", rc);
4758 pthread_mutex_unlock(&mMutex);
4759 goto error_exit;
4760 }
4761 }
4762 }
4763
4764 //First initialize all streams
4765 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4766 it != mStreamInfo.end(); it++) {
4767 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4768 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4769 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004770 setEis) {
4771 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4772 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4773 is_type = mStreamConfigInfo.is_type[i];
4774 break;
4775 }
4776 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004779 rc = channel->initialize(IS_TYPE_NONE);
4780 }
4781 if (NO_ERROR != rc) {
4782 LOGE("Channel initialization failed %d", rc);
4783 pthread_mutex_unlock(&mMutex);
4784 goto error_exit;
4785 }
4786 }
4787
4788 if (mRawDumpChannel) {
4789 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4790 if (rc != NO_ERROR) {
4791 LOGE("Error: Raw Dump Channel init failed");
4792 pthread_mutex_unlock(&mMutex);
4793 goto error_exit;
4794 }
4795 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004796 if (mHdrPlusRawSrcChannel) {
4797 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4798 if (rc != NO_ERROR) {
4799 LOGE("Error: HDR+ RAW Source Channel init failed");
4800 pthread_mutex_unlock(&mMutex);
4801 goto error_exit;
4802 }
4803 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 if (mSupportChannel) {
4805 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4806 if (rc < 0) {
4807 LOGE("Support channel initialization failed");
4808 pthread_mutex_unlock(&mMutex);
4809 goto error_exit;
4810 }
4811 }
4812 if (mAnalysisChannel) {
4813 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4814 if (rc < 0) {
4815 LOGE("Analysis channel initialization failed");
4816 pthread_mutex_unlock(&mMutex);
4817 goto error_exit;
4818 }
4819 }
4820 if (mDummyBatchChannel) {
4821 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4822 if (rc < 0) {
4823 LOGE("mDummyBatchChannel setBatchSize failed");
4824 pthread_mutex_unlock(&mMutex);
4825 goto error_exit;
4826 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004827 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004828 if (rc < 0) {
4829 LOGE("mDummyBatchChannel initialization failed");
4830 pthread_mutex_unlock(&mMutex);
4831 goto error_exit;
4832 }
4833 }
4834
4835 // Set bundle info
4836 rc = setBundleInfo();
4837 if (rc < 0) {
4838 LOGE("setBundleInfo failed %d", rc);
4839 pthread_mutex_unlock(&mMutex);
4840 goto error_exit;
4841 }
4842
4843 //update settings from app here
4844 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4845 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4846 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4847 }
4848 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4849 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4850 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4851 }
4852 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4853 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4854 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4855
4856 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4857 (mLinkedCameraId != mCameraId) ) {
4858 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4859 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004860 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 goto error_exit;
4862 }
4863 }
4864
4865 // add bundle related cameras
4866 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4867 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004868 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4869 &m_pDualCamCmdPtr->bundle_info;
4870 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 if (mIsDeviceLinked)
4872 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4873 else
4874 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4875
4876 pthread_mutex_lock(&gCamLock);
4877
4878 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4879 LOGE("Dualcam: Invalid Session Id ");
4880 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 goto error_exit;
4883 }
4884
4885 if (mIsMainCamera == 1) {
4886 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4887 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004888 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004889 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004890 // related session id should be session id of linked session
4891 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4892 } else {
4893 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4894 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004895 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004896 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4898 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004899 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 pthread_mutex_unlock(&gCamLock);
4901
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004902 rc = mCameraHandle->ops->set_dual_cam_cmd(
4903 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 if (rc < 0) {
4905 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004907 goto error_exit;
4908 }
4909 }
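        // Summary of the bundle setup above: depending on mIsMainCamera, this
        // camera is configured either as CAM_MODE_PRIMARY / CAM_TYPE_MAIN
        // (bayer role) or as CAM_MODE_SECONDARY / CAM_TYPE_AUX (mono role);
        // in both cases 3A sync follows (CAM_3A_SYNC_FOLLOW) and the related
        // sensor session id points at the linked camera's session before the
        // dual-camera command is sent to the backend.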
4910
4911 //Then start them.
4912 LOGH("Start META Channel");
4913 rc = mMetadataChannel->start();
4914 if (rc < 0) {
4915 LOGE("META channel start failed");
4916 pthread_mutex_unlock(&mMutex);
4917 goto error_exit;
4918 }
4919
4920 if (mAnalysisChannel) {
4921 rc = mAnalysisChannel->start();
4922 if (rc < 0) {
4923 LOGE("Analysis channel start failed");
4924 mMetadataChannel->stop();
4925 pthread_mutex_unlock(&mMutex);
4926 goto error_exit;
4927 }
4928 }
4929
4930 if (mSupportChannel) {
4931 rc = mSupportChannel->start();
4932 if (rc < 0) {
4933 LOGE("Support channel start failed");
4934 mMetadataChannel->stop();
4935 /* Although support and analysis are mutually exclusive today
4936                       adding it in any case for future proofing */
4937 if (mAnalysisChannel) {
4938 mAnalysisChannel->stop();
4939 }
4940 pthread_mutex_unlock(&mMutex);
4941 goto error_exit;
4942 }
4943 }
4944 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4945 it != mStreamInfo.end(); it++) {
4946 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4947 LOGH("Start Processing Channel mask=%d",
4948 channel->getStreamTypeMask());
4949 rc = channel->start();
4950 if (rc < 0) {
4951 LOGE("channel start failed");
4952 pthread_mutex_unlock(&mMutex);
4953 goto error_exit;
4954 }
4955 }
4956
4957 if (mRawDumpChannel) {
4958 LOGD("Starting raw dump stream");
4959 rc = mRawDumpChannel->start();
4960 if (rc != NO_ERROR) {
4961 LOGE("Error Starting Raw Dump Channel");
4962 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4963 it != mStreamInfo.end(); it++) {
4964 QCamera3Channel *channel =
4965 (QCamera3Channel *)(*it)->stream->priv;
4966 LOGH("Stopping Processing Channel mask=%d",
4967 channel->getStreamTypeMask());
4968 channel->stop();
4969 }
4970 if (mSupportChannel)
4971 mSupportChannel->stop();
4972 if (mAnalysisChannel) {
4973 mAnalysisChannel->stop();
4974 }
4975 mMetadataChannel->stop();
4976 pthread_mutex_unlock(&mMutex);
4977 goto error_exit;
4978 }
4979 }
4980
4981 if (mChannelHandle) {
4982
4983 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4984 mChannelHandle);
4985 if (rc != NO_ERROR) {
4986 LOGE("start_channel failed %d", rc);
4987 pthread_mutex_unlock(&mMutex);
4988 goto error_exit;
4989 }
4990 }
4991
4992 goto no_error;
4993error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004994 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 return rc;
4996no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mWokenUpByDaemon = false;
4998 mPendingLiveRequest = 0;
4999 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 }
5001
Chien-Yu Chenee335912017-02-09 17:53:20 -08005002 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005003 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005004 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5005 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5006 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5007 rc = enableHdrPlusModeLocked();
5008 if (rc != OK) {
5009 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5010 pthread_mutex_unlock(&mMutex);
5011 return rc;
5012 }
5013
5014 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5015 if (mHdrPlusRawSrcChannel) {
5016 rc = mHdrPlusRawSrcChannel->start();
5017 if (rc != OK) {
5018 LOGE("Error Starting HDR+ RAW Channel");
5019 pthread_mutex_unlock(&mMutex);
5020 return rc;
5021 }
5022 }
5023 mFirstPreviewIntentSeen = true;
5024 }
5025
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005027 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005028
5029 if (mFlushPerf) {
5030 //we cannot accept any requests during flush
5031 LOGE("process_capture_request cannot proceed during flush");
5032 pthread_mutex_unlock(&mMutex);
5033 return NO_ERROR; //should return an error
5034 }
5035
5036 if (meta.exists(ANDROID_REQUEST_ID)) {
5037 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5038 mCurrentRequestId = request_id;
5039 LOGD("Received request with id: %d", request_id);
5040 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5041 LOGE("Unable to find request id field, \
5042 & no previous id available");
5043 pthread_mutex_unlock(&mMutex);
5044 return NAME_NOT_FOUND;
5045 } else {
5046 LOGD("Re-using old request id");
5047 request_id = mCurrentRequestId;
5048 }
5049
5050 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5051 request->num_output_buffers,
5052 request->input_buffer,
5053 frameNumber);
5054 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005055 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005057 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 uint32_t snapshotStreamId = 0;
5059 for (size_t i = 0; i < request->num_output_buffers; i++) {
5060 const camera3_stream_buffer_t& output = request->output_buffers[i];
5061 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5062
Emilian Peev7650c122017-01-19 08:24:33 -08005063 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5064 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005065 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 blob_request = 1;
5067 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5068 }
5069
5070 if (output.acquire_fence != -1) {
5071 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5072 close(output.acquire_fence);
5073 if (rc != OK) {
5074 LOGE("sync wait failed %d", rc);
5075 pthread_mutex_unlock(&mMutex);
5076 return rc;
5077 }
5078 }
5079
Emilian Peev7650c122017-01-19 08:24:33 -08005080 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5081 depthRequestPresent = true;
5082 continue;
5083 }
5084
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005085 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005086 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
5088 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5089 isVidBufRequested = true;
5090 }
5091 }
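    // At this point streamsArray holds one entry per requested output stream
    // (depth buffers are skipped), identified by the backend stream ID. It is
    // extended below with internally requested streams and, when applicable,
    // the RAW dump and HDR+ RAW source streams, and is then passed to
    // setFrameParameters() together with the rest of the per-frame settings.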
5092
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005093    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5094 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5095 itr++) {
5096 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5097 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5098 channel->getStreamID(channel->getStreamTypeMask());
5099
5100 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5101 isVidBufRequested = true;
5102 }
5103 }
5104
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005106 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005107 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005108 }
5109 if (blob_request && mRawDumpChannel) {
5110 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005111 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005113 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 }
5115
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005116 {
5117 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5118 // Request a RAW buffer if
5119 // 1. mHdrPlusRawSrcChannel is valid.
5120 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5121 // 3. There is no pending HDR+ request.
5122 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5123 mHdrPlusPendingRequests.size() == 0) {
5124 streamsArray.stream_request[streamsArray.num_streams].streamID =
5125 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5126 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5127 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005128 }
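    // Illustration of the gating above (hypothetical period value): with
    // kHdrPlusRawPeriod == 4, a RAW buffer is requested only for frame
    // numbers 0, 4, 8, ... and only while no HDR+ request is pending, which
    // bounds the RAW capture rate fed to the HDR+ RAW source channel.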
5129
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005130 //extract capture intent
5131 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5132 mCaptureIntent =
5133 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5134 }
5135
5136 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5137 mCacMode =
5138 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5139 }
5140
5141 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005142 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005143
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005144 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005145 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005146 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5147 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005148 }
5149
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005150 if (hdrPlusRequest) {
5151 // For a HDR+ request, just set the frame parameters.
5152 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5153 if (rc < 0) {
5154 LOGE("fail to set frame parameters");
5155 pthread_mutex_unlock(&mMutex);
5156 return rc;
5157 }
5158 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 /* Parse the settings:
5160 * - For every request in NORMAL MODE
5161 * - For every request in HFR mode during preview only case
5162 * - For first request of every batch in HFR mode during video
5163         *   recording. In batch mode the same settings, except the frame number,
5164         *   are repeated in each request of the batch.
5165 */
5166 if (!mBatchSize ||
5167 (mBatchSize && !isVidBufRequested) ||
5168 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005169 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005170 if (rc < 0) {
5171 LOGE("fail to set frame parameters");
5172 pthread_mutex_unlock(&mMutex);
5173 return rc;
5174 }
5175 }
5176 /* For batchMode HFR, setFrameParameters is not called for every
5177         * request; only the frame number of the latest request is parsed.
5178         * Keep track of the first and last frame numbers in a batch so that
5179         * metadata for all frame numbers of the batch can be duplicated in
5180         * handleBatchMetadata */
5181 if (mBatchSize) {
5182 if (!mToBeQueuedVidBufs) {
5183 //start of the batch
5184 mFirstFrameNumberInBatch = request->frame_number;
5185 }
5186 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5187 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5188 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005189 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005190 return BAD_VALUE;
5191 }
5192 }
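        // Illustration (hypothetical batch size): with mBatchSize == 4 and a
        // batch starting at frame 100, mFirstFrameNumberInBatch is set to 100
        // on the first request of the batch and CAM_INTF_META_FRAME_NUMBER is
        // updated for each of frames 100..103, so handleBatchMetadata can
        // later duplicate the batch metadata across that frame-number range.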
5193 if (mNeedSensorRestart) {
5194 /* Unlock the mutex as restartSensor waits on the channels to be
5195 * stopped, which in turn calls stream callback functions -
5196 * handleBufferWithLock and handleMetadataWithLock */
5197 pthread_mutex_unlock(&mMutex);
5198 rc = dynamicUpdateMetaStreamInfo();
5199 if (rc != NO_ERROR) {
5200 LOGE("Restarting the sensor failed");
5201 return BAD_VALUE;
5202 }
5203 mNeedSensorRestart = false;
5204 pthread_mutex_lock(&mMutex);
5205 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005206 if(mResetInstantAEC) {
5207 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5208 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5209 mResetInstantAEC = false;
5210 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005211 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005212 if (request->input_buffer->acquire_fence != -1) {
5213 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5214 close(request->input_buffer->acquire_fence);
5215 if (rc != OK) {
5216 LOGE("input buffer sync wait failed %d", rc);
5217 pthread_mutex_unlock(&mMutex);
5218 return rc;
5219 }
5220 }
5221 }
5222
5223 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5224 mLastCustIntentFrmNum = frameNumber;
5225 }
5226 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005227 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 pendingRequestIterator latestRequest;
5229 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005230 pendingRequest.num_buffers = depthRequestPresent ?
5231 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 pendingRequest.request_id = request_id;
5233 pendingRequest.blob_request = blob_request;
5234 pendingRequest.timestamp = 0;
5235 pendingRequest.bUrgentReceived = 0;
5236 if (request->input_buffer) {
5237 pendingRequest.input_buffer =
5238 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5239 *(pendingRequest.input_buffer) = *(request->input_buffer);
5240 pInputBuffer = pendingRequest.input_buffer;
5241 } else {
5242 pendingRequest.input_buffer = NULL;
5243 pInputBuffer = NULL;
5244 }
5245
5246 pendingRequest.pipeline_depth = 0;
5247 pendingRequest.partial_result_cnt = 0;
5248 extractJpegMetadata(mCurJpegMeta, request);
5249 pendingRequest.jpegMetadata = mCurJpegMeta;
5250 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5251 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005252 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005253 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5254 mHybridAeEnable =
5255 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5256 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005257
5258 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5259 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005260 /* DevCamDebug metadata processCaptureRequest */
5261 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5262 mDevCamDebugMetaEnable =
5263 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5264 }
5265 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5266 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005267
5268 //extract CAC info
5269 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5270 mCacMode =
5271 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5272 }
5273 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005274 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005275
5276 PendingBuffersInRequest bufsForCurRequest;
5277 bufsForCurRequest.frame_number = frameNumber;
5278 // Mark current timestamp for the new request
5279 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005280 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005281
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005282 if (hdrPlusRequest) {
5283 // Save settings for this request.
5284 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5285 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5286
5287 // Add to pending HDR+ request queue.
5288 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5289 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5290
5291 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5292 }
5293
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005295 if (request->output_buffers[i].stream->data_space ==
5296 HAL_DATASPACE_DEPTH) {
5297 continue;
5298 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005299 RequestedBufferInfo requestedBuf;
5300 memset(&requestedBuf, 0, sizeof(requestedBuf));
5301 requestedBuf.stream = request->output_buffers[i].stream;
5302 requestedBuf.buffer = NULL;
5303 pendingRequest.buffers.push_back(requestedBuf);
5304
5305        // Add the buffer handle to the pending buffers list
5306 PendingBufferInfo bufferInfo;
5307 bufferInfo.buffer = request->output_buffers[i].buffer;
5308 bufferInfo.stream = request->output_buffers[i].stream;
5309 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5310 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5311 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5312 frameNumber, bufferInfo.buffer,
5313 channel->getStreamTypeMask(), bufferInfo.stream->format);
5314 }
5315 // Add this request packet into mPendingBuffersMap
5316 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5317 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5318 mPendingBuffersMap.get_num_overall_buffers());
5319
5320 latestRequest = mPendingRequestsList.insert(
5321 mPendingRequestsList.end(), pendingRequest);
5322 if(mFlush) {
5323 LOGI("mFlush is true");
5324 pthread_mutex_unlock(&mMutex);
5325 return NO_ERROR;
5326 }
5327
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005328 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5329 // channel.
5330 if (!hdrPlusRequest) {
5331 int indexUsed;
5332        // Notify the metadata channel that we received a request
5333 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 if(request->input_buffer != NULL){
5336 LOGD("Input request, frame_number %d", frameNumber);
5337 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5338 if (NO_ERROR != rc) {
5339 LOGE("fail to set reproc parameters");
5340 pthread_mutex_unlock(&mMutex);
5341 return rc;
5342 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005343 }
5344
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005345 // Call request on other streams
5346 uint32_t streams_need_metadata = 0;
5347 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5348 for (size_t i = 0; i < request->num_output_buffers; i++) {
5349 const camera3_stream_buffer_t& output = request->output_buffers[i];
5350 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5351
5352 if (channel == NULL) {
5353 LOGW("invalid channel pointer for stream");
5354 continue;
5355 }
5356
5357 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5358 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5359 output.buffer, request->input_buffer, frameNumber);
5360 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005361 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005362 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5363 if (rc < 0) {
5364 LOGE("Fail to request on picture channel");
5365 pthread_mutex_unlock(&mMutex);
5366 return rc;
5367 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005368 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005369 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5370 assert(NULL != mDepthChannel);
5371 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005372
Emilian Peev7650c122017-01-19 08:24:33 -08005373 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5374 if (rc < 0) {
5375 LOGE("Fail to map on depth buffer");
5376 pthread_mutex_unlock(&mMutex);
5377 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005378 }
Emilian Peev7650c122017-01-19 08:24:33 -08005379 } else {
5380 LOGD("snapshot request with buffer %p, frame_number %d",
5381 output.buffer, frameNumber);
5382 if (!request->settings) {
5383 rc = channel->request(output.buffer, frameNumber,
5384 NULL, mPrevParameters, indexUsed);
5385 } else {
5386 rc = channel->request(output.buffer, frameNumber,
5387 NULL, mParameters, indexUsed);
5388 }
5389 if (rc < 0) {
5390 LOGE("Fail to request on picture channel");
5391 pthread_mutex_unlock(&mMutex);
5392 return rc;
5393 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005394
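// Record the buffer index the channel picked for this stream in streamsArray so
// it can be passed to the backend via CAM_INTF_META_STREAM_ID; constrained
// high-speed mode free-runs (CAM_FREERUN_IDX) instead of using a specific index.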
Emilian Peev7650c122017-01-19 08:24:33 -08005395 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5396 uint32_t j = 0;
5397 for (j = 0; j < streamsArray.num_streams; j++) {
5398 if (streamsArray.stream_request[j].streamID == streamId) {
5399 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5400 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5401 else
5402 streamsArray.stream_request[j].buf_index = indexUsed;
5403 break;
5404 }
5405 }
5406 if (j == streamsArray.num_streams) {
5407 LOGE("Did not find matching stream to update index");
5408 assert(0);
5409 }
5410
5411 pendingBufferIter->need_metadata = true;
5412 streams_need_metadata++;
5413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005414 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005415 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5416 bool needMetadata = false;
5417 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5418 rc = yuvChannel->request(output.buffer, frameNumber,
5419 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5420 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 pthread_mutex_unlock(&mMutex);
5424 return rc;
5425 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005426
5427 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5428 uint32_t j = 0;
5429 for (j = 0; j < streamsArray.num_streams; j++) {
5430 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005431 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5432 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5433 else
5434 streamsArray.stream_request[j].buf_index = indexUsed;
5435 break;
5436 }
5437 }
5438 if (j == streamsArray.num_streams) {
5439 LOGE("Did not find matching stream to update index");
5440 assert(0);
5441 }
5442
5443 pendingBufferIter->need_metadata = needMetadata;
5444 if (needMetadata)
5445 streams_need_metadata += 1;
5446 LOGD("calling YUV channel request, need_metadata is %d",
5447 needMetadata);
5448 } else {
5449 LOGD("request with buffer %p, frame_number %d",
5450 output.buffer, frameNumber);
5451
5452 rc = channel->request(output.buffer, frameNumber, indexUsed);
5453
5454 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5455 uint32_t j = 0;
5456 for (j = 0; j < streamsArray.num_streams; j++) {
5457 if (streamsArray.stream_request[j].streamID == streamId) {
5458 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5459 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5460 else
5461 streamsArray.stream_request[j].buf_index = indexUsed;
5462 break;
5463 }
5464 }
5465 if (j == streamsArray.num_streams) {
5466 LOGE("Did not find matching stream to update index");
5467 assert(0);
5468 }
5469
5470 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5471 && mBatchSize) {
5472 mToBeQueuedVidBufs++;
5473 if (mToBeQueuedVidBufs == mBatchSize) {
5474 channel->queueBatchBuf();
5475 }
5476 }
5477 if (rc < 0) {
5478 LOGE("request failed");
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
5482 }
5483 pendingBufferIter++;
5484 }
5485
5486 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5487 itr++) {
5488 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5489
5490 if (channel == NULL) {
5491 LOGE("invalid channel pointer for stream");
5492 assert(0);
5493 return BAD_VALUE;
5494 }
5495
5496 InternalRequest requestedStream;
5497 requestedStream = (*itr);
5498
5499
5500 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5501 LOGD("snapshot request internally input buffer %p, frame_number %d",
5502 request->input_buffer, frameNumber);
5503 if(request->input_buffer != NULL){
5504 rc = channel->request(NULL, frameNumber,
5505 pInputBuffer, &mReprocMeta, indexUsed, true,
5506 requestedStream.meteringOnly);
5507 if (rc < 0) {
5508 LOGE("Fail to request on picture channel");
5509 pthread_mutex_unlock(&mMutex);
5510 return rc;
5511 }
5512 } else {
5513 LOGD("snapshot request with frame_number %d", frameNumber);
5514 if (!request->settings) {
5515 rc = channel->request(NULL, frameNumber,
5516 NULL, mPrevParameters, indexUsed, true,
5517 requestedStream.meteringOnly);
5518 } else {
5519 rc = channel->request(NULL, frameNumber,
5520 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5521 }
5522 if (rc < 0) {
5523 LOGE("Fail to request on picture channel");
5524 pthread_mutex_unlock(&mMutex);
5525 return rc;
5526 }
5527
5528 if ((*itr).meteringOnly != 1) {
5529 requestedStream.need_metadata = 1;
5530 streams_need_metadata++;
5531 }
5532 }
5533
5534 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5535 uint32_t j = 0;
5536 for (j = 0; j < streamsArray.num_streams; j++) {
5537 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005538 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5539 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5540 else
5541 streamsArray.stream_request[j].buf_index = indexUsed;
5542 break;
5543 }
5544 }
5545 if (j == streamsArray.num_streams) {
5546 LOGE("Did not find matching stream to update index");
5547 assert(0);
5548 }
5549
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005550 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005552 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005554 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005556 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005557
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005558 //If 2 streams have need_metadata set to true, fail the request, unless
5559 //we copy/reference count the metadata buffer
5560 if (streams_need_metadata > 1) {
5561 LOGE("not supporting request in which two streams requires"
5562 " 2 HAL metadata for reprocessing");
5563 pthread_mutex_unlock(&mMutex);
5564 return -EINVAL;
5565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005566
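// Enable PDAF raw data reporting in the backend only when this request
// includes a depth stream buffer (depthRequestPresent).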
Emilian Peev7650c122017-01-19 08:24:33 -08005567 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5569 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5570 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5571 pthread_mutex_unlock(&mMutex);
5572 return BAD_VALUE;
5573 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 if (request->input_buffer == NULL) {
5575 /* Set the parameters to backend:
5576 * - For every request in NORMAL MODE
5577 * - For every request in HFR mode during preview only case
5578 * - Once every batch in HFR mode during video recording
5579 */
5580 if (!mBatchSize ||
5581 (mBatchSize && !isVidBufRequested) ||
5582 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5583 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5584 mBatchSize, isVidBufRequested,
5585 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005586
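// HFR batch: merge this request's stream IDs into mBatchedStreamsArray,
// skipping duplicates, so that a single set_parms call covers every stream
// touched by the batch.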
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005587 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5588 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5589 uint32_t m = 0;
5590 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5591 if (streamsArray.stream_request[k].streamID ==
5592 mBatchedStreamsArray.stream_request[m].streamID)
5593 break;
5594 }
5595 if (m == mBatchedStreamsArray.num_streams) {
5596 mBatchedStreamsArray.stream_request\
5597 [mBatchedStreamsArray.num_streams].streamID =
5598 streamsArray.stream_request[k].streamID;
5599 mBatchedStreamsArray.stream_request\
5600 [mBatchedStreamsArray.num_streams].buf_index =
5601 streamsArray.stream_request[k].buf_index;
5602 mBatchedStreamsArray.num_streams =
5603 mBatchedStreamsArray.num_streams + 1;
5604 }
5605 }
5606 streamsArray = mBatchedStreamsArray;
5607 }
5608 /* Update stream id of all the requested buffers */
5609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5610 streamsArray)) {
5611 LOGE("Failed to set stream type mask in the parameters");
5612 return BAD_VALUE;
5613 }
5614
5615 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5616 mParameters);
5617 if (rc < 0) {
5618 LOGE("set_parms failed");
5619 }
5620 /* reset to zero because the batch is queued */
5621 mToBeQueuedVidBufs = 0;
5622 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5623 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5624 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005625 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5626 uint32_t m = 0;
5627 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5628 if (streamsArray.stream_request[k].streamID ==
5629 mBatchedStreamsArray.stream_request[m].streamID)
5630 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005631 }
5632 if (m == mBatchedStreamsArray.num_streams) {
5633 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5634 streamID = streamsArray.stream_request[k].streamID;
5635 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5636 buf_index = streamsArray.stream_request[k].buf_index;
5637 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5638 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005639 }
5640 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005641 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 }
5644
5645 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5646
5647 mState = STARTED;
5648 // Set up a timed condition wait
5649 struct timespec ts;
5650 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005651 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 if (rc < 0) {
5653 isValidTimeout = 0;
5654 LOGE("Error reading the real time clock!!");
5655 }
5656 else {
5657 // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 int64_t timeout = 5;
5659 {
5660 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5661 // If there is a pending HDR+ request, the following requests may be blocked until the
5662 // HDR+ request is done. So allow a longer timeout.
5663 if (mHdrPlusPendingRequests.size() > 0) {
5664 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5665 }
5666 }
5667 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005668 }
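// Throttle the caller: wait until the number of in-flight requests drops below
// mMinInFlightRequests (reprocess requests with an input buffer are not
// throttled), or until the timeout computed above expires.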
5669 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005670 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005671 (mState != ERROR) && (mState != DEINIT)) {
5672 if (!isValidTimeout) {
5673 LOGD("Blocking on conditional wait");
5674 pthread_cond_wait(&mRequestCond, &mMutex);
5675 }
5676 else {
5677 LOGD("Blocking on timed conditional wait");
5678 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5679 if (rc == ETIMEDOUT) {
5680 rc = -ENODEV;
5681 LOGE("Unblocked on timeout!!!!");
5682 break;
5683 }
5684 }
5685 LOGD("Unblocked");
5686 if (mWokenUpByDaemon) {
5687 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005688 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005689 break;
5690 }
5691 }
5692 pthread_mutex_unlock(&mMutex);
5693
5694 return rc;
5695}
5696
5697/*===========================================================================
5698 * FUNCTION : dump
5699 *
5700 * DESCRIPTION: Dump the HAL3 state (pending requests, pending buffers and
5701 * pending frame drops) to the given file descriptor
5702 * PARAMETERS :
5703 * @fd : file descriptor to write the dump to
5704 *
5705 * RETURN : None
5706 *==========================================================================*/
5707void QCamera3HardwareInterface::dump(int fd)
5708{
5709 pthread_mutex_lock(&mMutex);
5710 dprintf(fd, "\n Camera HAL3 information Begin \n");
5711
5712 dprintf(fd, "\nNumber of pending requests: %zu \n",
5713 mPendingRequestsList.size());
5714 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5715 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5716 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5717 for(pendingRequestIterator i = mPendingRequestsList.begin();
5718 i != mPendingRequestsList.end(); i++) {
5719 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5720 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5721 i->input_buffer);
5722 }
5723 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5724 mPendingBuffersMap.get_num_overall_buffers());
5725 dprintf(fd, "-------+------------------\n");
5726 dprintf(fd, " Frame | Stream type mask \n");
5727 dprintf(fd, "-------+------------------\n");
5728 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5729 for(auto &j : req.mPendingBufferList) {
5730 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5731 dprintf(fd, " %5d | %11d \n",
5732 req.frame_number, channel->getStreamTypeMask());
5733 }
5734 }
5735 dprintf(fd, "-------+------------------\n");
5736
5737 dprintf(fd, "\nPending frame drop list: %zu\n",
5738 mPendingFrameDropList.size());
5739 dprintf(fd, "-------+-----------\n");
5740 dprintf(fd, " Frame | Stream ID \n");
5741 dprintf(fd, "-------+-----------\n");
5742 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5743 i != mPendingFrameDropList.end(); i++) {
5744 dprintf(fd, " %5d | %9d \n",
5745 i->frame_number, i->stream_ID);
5746 }
5747 dprintf(fd, "-------+-----------\n");
5748
5749 dprintf(fd, "\n Camera HAL3 information End \n");
5750
5751 /* use dumpsys media.camera as trigger to send update debug level event */
5752 mUpdateDebugLevel = true;
5753 pthread_mutex_unlock(&mMutex);
5754 return;
5755}
5756
5757/*===========================================================================
5758 * FUNCTION : flush
5759 *
5760 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5761 * conditionally restarts channels
5762 *
5763 * PARAMETERS :
5764 * @restartChannels: whether to re-start all channels after the flush
5765 *
5766 *
5767 * RETURN :
5768 * 0 on success
5769 * Error code on failure
5770 *==========================================================================*/
5771int QCamera3HardwareInterface::flush(bool restartChannels)
5772{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005773 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005774 int32_t rc = NO_ERROR;
5775
5776 LOGD("Unblocking Process Capture Request");
5777 pthread_mutex_lock(&mMutex);
5778 mFlush = true;
5779 pthread_mutex_unlock(&mMutex);
5780
5781 rc = stopAllChannels();
5782 // unlink of dualcam
5783 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005784 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5785 &m_pDualCamCmdPtr->bundle_info;
5786 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005787 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5788 pthread_mutex_lock(&gCamLock);
5789
5790 if (mIsMainCamera == 1) {
5791 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5792 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005793 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005794 // related session id should be session id of linked session
5795 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5796 } else {
5797 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5798 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005799 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005800 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5801 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005802 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005803 pthread_mutex_unlock(&gCamLock);
5804
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005805 rc = mCameraHandle->ops->set_dual_cam_cmd(
5806 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005807 if (rc < 0) {
5808 LOGE("Dualcam: Unlink failed, but still proceed to close");
5809 }
5810 }
5811
5812 if (rc < 0) {
5813 LOGE("stopAllChannels failed");
5814 return rc;
5815 }
5816 if (mChannelHandle) {
5817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5818 mChannelHandle);
5819 }
5820
5821 // Reset bundle info
5822 rc = setBundleInfo();
5823 if (rc < 0) {
5824 LOGE("setBundleInfo failed %d", rc);
5825 return rc;
5826 }
5827
5828 // Mutex Lock
5829 pthread_mutex_lock(&mMutex);
5830
5831 // Unblock process_capture_request
5832 mPendingLiveRequest = 0;
5833 pthread_cond_signal(&mRequestCond);
5834
5835 rc = notifyErrorForPendingRequests();
5836 if (rc < 0) {
5837 LOGE("notifyErrorForPendingRequests failed");
5838 pthread_mutex_unlock(&mMutex);
5839 return rc;
5840 }
5841
5842 mFlush = false;
5843
5844 // Start the Streams/Channels
5845 if (restartChannels) {
5846 rc = startAllChannels();
5847 if (rc < 0) {
5848 LOGE("startAllChannels failed");
5849 pthread_mutex_unlock(&mMutex);
5850 return rc;
5851 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005852 if (mChannelHandle) {
5853 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5854 mChannelHandle);
5855 if (rc < 0) {
5856 LOGE("start_channel failed");
5857 pthread_mutex_unlock(&mMutex);
5858 return rc;
5859 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005860 }
5861 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005862 pthread_mutex_unlock(&mMutex);
5863
5864 return 0;
5865}
5866
5867/*===========================================================================
5868 * FUNCTION : flushPerf
5869 *
5870 * DESCRIPTION: This is the performance-optimized version of flush that does
5871 * not stop the streams; instead it flushes the backend pipeline
5872 *
5873 * PARAMETERS :
5874 *
5875 *
5876 * RETURN : 0 : success
5877 * -EINVAL: input is malformed (device is not valid)
5878 * -ENODEV: if the device has encountered a serious error
5879 *==========================================================================*/
5880int QCamera3HardwareInterface::flushPerf()
5881{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005882 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005883 int32_t rc = 0;
5884 struct timespec timeout;
5885 bool timed_wait = false;
5886
5887 pthread_mutex_lock(&mMutex);
5888 mFlushPerf = true;
5889 mPendingBuffersMap.numPendingBufsAtFlush =
5890 mPendingBuffersMap.get_num_overall_buffers();
5891 LOGD("Calling flush. Wait for %d buffers to return",
5892 mPendingBuffersMap.numPendingBufsAtFlush);
5893
5894 /* send the flush event to the backend */
5895 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5896 if (rc < 0) {
5897 LOGE("Error in flush: IOCTL failure");
5898 mFlushPerf = false;
5899 pthread_mutex_unlock(&mMutex);
5900 return -ENODEV;
5901 }
5902
5903 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5904 LOGD("No pending buffers in HAL, return flush");
5905 mFlushPerf = false;
5906 pthread_mutex_unlock(&mMutex);
5907 return rc;
5908 }
5909
5910 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005911 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005912 if (rc < 0) {
5913 LOGE("Error reading the real time clock, cannot use timed wait");
5914 } else {
5915 timeout.tv_sec += FLUSH_TIMEOUT;
5916 timed_wait = true;
5917 }
5918
5919 //Block on conditional variable
5920 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5921 LOGD("Waiting on mBuffersCond");
5922 if (!timed_wait) {
5923 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5924 if (rc != 0) {
5925 LOGE("pthread_cond_wait failed due to rc = %s",
5926 strerror(rc));
5927 break;
5928 }
5929 } else {
5930 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5931 if (rc != 0) {
5932 LOGE("pthread_cond_timedwait failed due to rc = %s",
5933 strerror(rc));
5934 break;
5935 }
5936 }
5937 }
5938 if (rc != 0) {
5939 mFlushPerf = false;
5940 pthread_mutex_unlock(&mMutex);
5941 return -ENODEV;
5942 }
5943
5944 LOGD("Received buffers, now safe to return them");
5945
5946 //make sure the channels handle flush
5947 //currently only required for the picture channel to release snapshot resources
5948 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5949 it != mStreamInfo.end(); it++) {
5950 QCamera3Channel *channel = (*it)->channel;
5951 if (channel) {
5952 rc = channel->flush();
5953 if (rc) {
5954 LOGE("Flushing the channels failed with error %d", rc);
5955 // Even though the channel flush failed, we need to continue and
5956 // return the buffers we have to the framework; however, the return
5957 // value will be an error
5958 rc = -ENODEV;
5959 }
5960 }
5961 }
5962
5963 /* notify the frameworks and send errored results */
5964 rc = notifyErrorForPendingRequests();
5965 if (rc < 0) {
5966 LOGE("notifyErrorForPendingRequests failed");
5967 pthread_mutex_unlock(&mMutex);
5968 return rc;
5969 }
5970
5971 //unblock process_capture_request
5972 mPendingLiveRequest = 0;
5973 unblockRequestIfNecessary();
5974
5975 mFlushPerf = false;
5976 pthread_mutex_unlock(&mMutex);
5977 LOGD ("Flush Operation complete. rc = %d", rc);
5978 return rc;
5979}
5980
5981/*===========================================================================
5982 * FUNCTION : handleCameraDeviceError
5983 *
5984 * DESCRIPTION: This function performs an internal flush, notifies the error
5985 * to the framework, and updates the state variable.
5986 *
5987 * PARAMETERS : None
5988 *
5989 * RETURN : NO_ERROR on Success
5990 * Error code on failure
5991 *==========================================================================*/
5992int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5993{
5994 int32_t rc = NO_ERROR;
5995
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005996 {
5997 Mutex::Autolock lock(mFlushLock);
5998 pthread_mutex_lock(&mMutex);
5999 if (mState != ERROR) {
6000 //if mState != ERROR, nothing to be done
6001 pthread_mutex_unlock(&mMutex);
6002 return NO_ERROR;
6003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006004 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006005
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006006 rc = flush(false /* restart channels */);
6007 if (NO_ERROR != rc) {
6008 LOGE("internal flush to handle mState = ERROR failed");
6009 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006010
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006011 pthread_mutex_lock(&mMutex);
6012 mState = DEINIT;
6013 pthread_mutex_unlock(&mMutex);
6014 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006015
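// Report the failure to the framework as a device-level error
// (CAMERA3_MSG_ERROR_DEVICE) with no associated stream or frame number.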
6016 camera3_notify_msg_t notify_msg;
6017 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6018 notify_msg.type = CAMERA3_MSG_ERROR;
6019 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6020 notify_msg.message.error.error_stream = NULL;
6021 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006022 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006023
6024 return rc;
6025}
6026
6027/*===========================================================================
6028 * FUNCTION : captureResultCb
6029 *
6030 * DESCRIPTION: Callback handler for all capture results
6031 * (stream buffers as well as metadata)
6032 *
6033 * PARAMETERS :
6034 * @metadata : metadata information
6035 * @buffer : actual gralloc buffer to be returned to frameworks.
6036 * NULL if metadata.
6037 *
6038 * RETURN : NONE
6039 *==========================================================================*/
6040void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6041 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6042{
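// Dispatch by result type: metadata goes through batched or per-frame handling
// depending on mBatchSize; input-buffer completions and output buffers are
// handled by their respective helpers under mMutex.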
6043 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006044 pthread_mutex_lock(&mMutex);
6045 uint8_t batchSize = mBatchSize;
6046 pthread_mutex_unlock(&mMutex);
6047 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006048 handleBatchMetadata(metadata_buf,
6049 true /* free_and_bufdone_meta_buf */);
6050 } else { /* mBatchSize = 0 */
6051 hdrPlusPerfLock(metadata_buf);
6052 pthread_mutex_lock(&mMutex);
6053 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006054 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006055 false /* first frame of batch metadata */ ,
6056 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 pthread_mutex_unlock(&mMutex);
6058 }
6059 } else if (isInputBuffer) {
6060 pthread_mutex_lock(&mMutex);
6061 handleInputBufferWithLock(frame_number);
6062 pthread_mutex_unlock(&mMutex);
6063 } else {
6064 pthread_mutex_lock(&mMutex);
6065 handleBufferWithLock(buffer, frame_number);
6066 pthread_mutex_unlock(&mMutex);
6067 }
6068 return;
6069}
6070
6071/*===========================================================================
6072 * FUNCTION : getReprocessibleOutputStreamId
6073 *
6074 * DESCRIPTION: Get source output stream id for the input reprocess stream
6075 * based on size and format, which would be the largest
6076 * output stream if an input stream exists.
6077 *
6078 * PARAMETERS :
6079 * @id : return the stream id if found
6080 *
6081 * RETURN : int32_t type of status
6082 * NO_ERROR -- success
6083 * non-zero failure code
6084 *==========================================================================*/
6085int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6086{
6087 /* check if there is any output or bidirectional stream with the same size
6088 and format, and return that stream */
6089 if ((mInputStreamInfo.dim.width > 0) &&
6090 (mInputStreamInfo.dim.height > 0)) {
6091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6092 it != mStreamInfo.end(); it++) {
6093
6094 camera3_stream_t *stream = (*it)->stream;
6095 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6096 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6097 (stream->format == mInputStreamInfo.format)) {
6098 // Usage flag for an input stream and the source output stream
6099 // may be different.
6100 LOGD("Found reprocessible output stream! %p", *it);
6101 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6102 stream->usage, mInputStreamInfo.usage);
6103
6104 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6105 if (channel != NULL && channel->mStreams[0]) {
6106 id = channel->mStreams[0]->getMyServerID();
6107 return NO_ERROR;
6108 }
6109 }
6110 }
6111 } else {
6112 LOGD("No input stream, so no reprocessible output stream");
6113 }
6114 return NAME_NOT_FOUND;
6115}
6116
6117/*===========================================================================
6118 * FUNCTION : lookupFwkName
6119 *
6120 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6121 * make sure the parameter is correctly propagated
6122 *
6123 * PARAMETERS :
6124 * @arr : map between the two enums
6125 * @len : len of the map
6126 * @hal_name : name of the hal_parm to map
6127 *
6128 * RETURN : int type of status
6129 * fwk_name -- success
6130 * NAME_NOT_FOUND -- no matching framework type found
6131 *==========================================================================*/
6132template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6133 size_t len, halType hal_name)
6134{
6135
6136 for (size_t i = 0; i < len; i++) {
6137 if (arr[i].hal_name == hal_name) {
6138 return arr[i].fwk_name;
6139 }
6140 }
6141
6142 /* Not able to find matching framework type is not necessarily
6143 * an error case. This happens when mm-camera supports more attributes
6144 * than the frameworks do */
6145 LOGH("Cannot find matching framework type");
6146 return NAME_NOT_FOUND;
6147}
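/* Illustrative usage (not part of the original source): translating a HAL flash
 * mode to its framework counterpart, mirroring the pattern used later in
 * translateFromHalMetadata(); FLASH_MODES_MAP is defined elsewhere in this file.
 *
 *     int val = lookupFwkName(FLASH_MODES_MAP,
 *             METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
 *     if (NAME_NOT_FOUND != val) {
 *         uint8_t fwk_flashMode = (uint8_t)val;
 *         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
 *     }
 */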
6148
6149/*===========================================================================
6150 * FUNCTION : lookupHalName
6151 *
6152 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6153 * make sure the parameter is correctly propagated
6154 *
6155 * PARAMETERS :
6156 * @arr : map between the two enums
6157 * @len : len of the map
6158 * @fwk_name : name of the fwk_parm to map
6159 *
6160 * RETURN : int32_t type of status
6161 * hal_name -- success
6162 * NAME_NOT_FOUND -- no matching HAL type found
6163 *==========================================================================*/
6164template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6165 size_t len, fwkType fwk_name)
6166{
6167 for (size_t i = 0; i < len; i++) {
6168 if (arr[i].fwk_name == fwk_name) {
6169 return arr[i].hal_name;
6170 }
6171 }
6172
6173 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6174 return NAME_NOT_FOUND;
6175}
6176
6177/*===========================================================================
6178 * FUNCTION : lookupProp
6179 *
6180 * DESCRIPTION: lookup a value by its name
6181 *
6182 * PARAMETERS :
6183 * @arr : map between the two enums
6184 * @len : size of the map
6185 * @name : name to be looked up
6186 *
6187 * RETURN : Value if found
6188 * CAM_CDS_MODE_MAX if not found
6189 *==========================================================================*/
6190template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6191 size_t len, const char *name)
6192{
6193 if (name) {
6194 for (size_t i = 0; i < len; i++) {
6195 if (!strcmp(arr[i].desc, name)) {
6196 return arr[i].val;
6197 }
6198 }
6199 }
6200 return CAM_CDS_MODE_MAX;
6201}
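/* Illustrative usage (not part of the original source): resolving a CDS mode from
 * an Android system property. The property name and the CDS_MAP table of
 * {desc, val} pairs are assumptions about how this helper is typically used.
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.CDS", prop, "Auto");
 *     cam_cds_mode_type_t cds_mode =
 *             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *     if (CAM_CDS_MODE_MAX == cds_mode) {
 *         // unrecognized value: fall back to a default CDS mode
 *     }
 */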
6202
6203/*===========================================================================
6204 * FUNCTION   : translateFromHalMetadata
6205 * DESCRIPTION: Translate backend metadata into the framework camera_metadata_t format
6206 *
6207 * PARAMETERS :
6208 * @metadata : metadata information from callback
6209 * @timestamp: metadata buffer timestamp
6210 * @request_id: request id
6211 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006212 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006213 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6214 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006215 * @pprocDone: whether internal offline postprocessing is done
6216 *
6217 * RETURN : camera_metadata_t*
6218 * metadata in a format specified by fwk
6219 *==========================================================================*/
6220camera_metadata_t*
6221QCamera3HardwareInterface::translateFromHalMetadata(
6222 metadata_buffer_t *metadata,
6223 nsecs_t timestamp,
6224 int32_t request_id,
6225 const CameraMetadata& jpegMetadata,
6226 uint8_t pipeline_depth,
6227 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006228 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006229 /* DevCamDebug metadata translateFromHalMetadata argument */
6230 uint8_t DevCamDebug_meta_enable,
6231 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006232 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006233 uint8_t fwk_cacMode,
6234 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006235{
6236 CameraMetadata camMetadata;
6237 camera_metadata_t *resultMetadata;
6238
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006239 if (mBatchSize && !firstMetadataInBatch) {
6240 /* In batch mode, use cached metadata from the first metadata
6241 in the batch */
6242 camMetadata.clear();
6243 camMetadata = mCachedMetadata;
6244 }
6245
Thierry Strudel3d639192016-09-09 11:52:26 -07006246 if (jpegMetadata.entryCount())
6247 camMetadata.append(jpegMetadata);
6248
6249 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6250 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6251 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6252 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006253 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006254 if (mBatchSize == 0) {
6255 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6256 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006258
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006259 if (mBatchSize && !firstMetadataInBatch) {
6260 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6261 resultMetadata = camMetadata.release();
6262 return resultMetadata;
6263 }
6264
Samuel Ha68ba5172016-12-15 18:41:12 -08006265 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6266 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6267 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6268 // DevCamDebug metadata translateFromHalMetadata AF
6269 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6270 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6271 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6272 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6273 }
6274 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6275 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6276 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6277 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6278 }
6279 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6280 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6281 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6282 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6283 }
6284 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6285 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6286 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6287 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6288 }
6289 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6290 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6291 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6292 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6293 }
6294 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6295 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6296 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6297 *DevCamDebug_af_monitor_pdaf_target_pos;
6298 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6299 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6300 }
6301 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6302 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6303 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6304 *DevCamDebug_af_monitor_pdaf_confidence;
6305 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6306 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6307 }
6308 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6309 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6310 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6311 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6312 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6313 }
6314 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6315 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6316 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6317 *DevCamDebug_af_monitor_tof_target_pos;
6318 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6319 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6320 }
6321 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6322 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6323 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6324 *DevCamDebug_af_monitor_tof_confidence;
6325 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6326 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6327 }
6328 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6329 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6330 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6331 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6332 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6333 }
6334 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6335 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6336 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6337 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6338 &fwk_DevCamDebug_af_monitor_type_select, 1);
6339 }
6340 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6341 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6342 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6343 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6344 &fwk_DevCamDebug_af_monitor_refocus, 1);
6345 }
6346 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6347 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6348 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6349 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6350 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6351 }
6352 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6353 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6354 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6355 *DevCamDebug_af_search_pdaf_target_pos;
6356 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6357 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6358 }
6359 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6360 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6361 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6362 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6363 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6364 }
6365 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6366 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6367 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6368 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6369 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6370 }
6371 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6372 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6373 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6374 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6375 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6376 }
6377 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6378 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6379 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6380 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6381 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6382 }
6383 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6384 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6385 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6386 *DevCamDebug_af_search_tof_target_pos;
6387 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6388 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6389 }
6390 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6391 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6392 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6393 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6394 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6395 }
6396 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6397 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6398 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6399 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6400 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6401 }
6402 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6403 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6404 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6405 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6406 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6407 }
6408 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6409 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6410 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6411 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6412 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6413 }
6414 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6415 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6416 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6417 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6418 &fwk_DevCamDebug_af_search_type_select, 1);
6419 }
6420 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6421 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6422 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6423 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6424 &fwk_DevCamDebug_af_search_next_pos, 1);
6425 }
6426 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6427 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6428 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6429 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6430 &fwk_DevCamDebug_af_search_target_pos, 1);
6431 }
6432 // DevCamDebug metadata translateFromHalMetadata AEC
6433 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6434 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6435 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6436 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6437 }
6438 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6439 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6440 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6441 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6442 }
6443 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6444 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6445 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6446 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6447 }
6448 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6449 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6450 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6451 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6452 }
6453 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6454 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6455 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6456 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6457 }
6458 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6459 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6460 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6461 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6462 }
6463 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6464 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6465 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6466 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6467 }
6468 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6469 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6470 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6471 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6472 }
Samuel Ha34229982017-02-17 13:51:11 -08006473 // DevCamDebug metadata translateFromHalMetadata zzHDR
6474 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6475 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6476 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6477 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6478 }
6479 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6480 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6481 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6482 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6483 }
6484 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6485 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6486 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6487 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6488 }
6489 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6490 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6491 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6492 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6493 }
6494 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6495 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6496 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6497 *DevCamDebug_aec_hdr_sensitivity_ratio;
6498 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6499 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6500 }
6501 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6502 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6503 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6504 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6505 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6506 }
6507 // DevCamDebug metadata translateFromHalMetadata ADRC
6508 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6509 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6510 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6511 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6512 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6513 }
6514 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6515 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6516 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6517 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6518 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6519 }
6520 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6521 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6522 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6523 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6524 }
6525 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6526 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6527 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6528 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6529 }
6530 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6531 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6532 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6533 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6534 }
6535 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6536 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6537 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6538 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6539 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006540 // DevCamDebug metadata translateFromHalMetadata AWB
6541 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6542 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6543 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6544 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6545 }
6546 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6547 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6548 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6549 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6550 }
6551 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6552 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6553 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6554 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6555 }
6556 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6557 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6558 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6559 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6562 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6563 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6564 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6565 }
6566 }
6567 // atrace_end(ATRACE_TAG_ALWAYS);
6568
Thierry Strudel3d639192016-09-09 11:52:26 -07006569 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6570 int64_t fwk_frame_number = *frame_number;
6571 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6572 }
6573
6574 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6575 int32_t fps_range[2];
6576 fps_range[0] = (int32_t)float_range->min_fps;
6577 fps_range[1] = (int32_t)float_range->max_fps;
6578 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6579 fps_range, 2);
6580 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6581 fps_range[0], fps_range[1]);
6582 }
6583
6584 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6585 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6586 }
6587
6588 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6589 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6590 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6591 *sceneMode);
6592 if (NAME_NOT_FOUND != val) {
6593 uint8_t fwkSceneMode = (uint8_t)val;
6594 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6595 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6596 fwkSceneMode);
6597 }
6598 }
6599
6600 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6601 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6602 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6603 }
6604
6605 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6606 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6607 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6608 }
6609
6610 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6611 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6612 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6613 }
6614
6615 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6616 CAM_INTF_META_EDGE_MODE, metadata) {
6617 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6618 }
6619
6620 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6621 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6622 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6623 }
6624
6625 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6626 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6627 }
6628
6629 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6630 if (0 <= *flashState) {
6631 uint8_t fwk_flashState = (uint8_t) *flashState;
6632 if (!gCamCapability[mCameraId]->flash_available) {
6633 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6634 }
6635 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6636 }
6637 }
6638
6639 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6640 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6641 if (NAME_NOT_FOUND != val) {
6642 uint8_t fwk_flashMode = (uint8_t)val;
6643 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6644 }
6645 }
6646
6647 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6648 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6649 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6650 }
6651
6652 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6653 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6654 }
6655
6656 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6657 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6658 }
6659
6660 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6661 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6662 }
6663
6664 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6665 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6666 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6667 }
6668
6669 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6670 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6671 LOGD("fwk_videoStab = %d", fwk_videoStab);
6672 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6673 } else {
6674 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
6675 // so hardcode the video stabilization result to OFF mode.
6676 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6677 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006678 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006679 }
6680
6681 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6682 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6683 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6684 }
6685
6686 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6687 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6688 }
6689
Thierry Strudel3d639192016-09-09 11:52:26 -07006690 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6691 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006692 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006693
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006694 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6695 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006696
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006697        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006698 blackLevelAppliedPattern->cam_black_level[0],
6699 blackLevelAppliedPattern->cam_black_level[1],
6700 blackLevelAppliedPattern->cam_black_level[2],
6701 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006702 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6703 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006704
6705#ifndef USE_HAL_3_3
6706 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006707        // Need to convert from the internal 12-bit depth to the sensor's 10-bit raw
6708        // depth space.
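        // For example, an applied black level of 256 on the internal 12-bit scale
        // becomes 256 / 4 = 64 on the 10-bit scale reported to the framework.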
6709 fwk_blackLevelInd[0] /= 4.0;
6710 fwk_blackLevelInd[1] /= 4.0;
6711 fwk_blackLevelInd[2] /= 4.0;
6712 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006713 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6714 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006715#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006716 }
6717
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006718#ifndef USE_HAL_3_3
6719 // Fixed whitelevel is used by ISP/Sensor
6720 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6721 &gCamCapability[mCameraId]->white_level, 1);
6722#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006723
6724 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6725 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6726 int32_t scalerCropRegion[4];
6727 scalerCropRegion[0] = hScalerCropRegion->left;
6728 scalerCropRegion[1] = hScalerCropRegion->top;
6729 scalerCropRegion[2] = hScalerCropRegion->width;
6730 scalerCropRegion[3] = hScalerCropRegion->height;
6731
6732 // Adjust crop region from sensor output coordinate system to active
6733 // array coordinate system.
6734 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6735 scalerCropRegion[2], scalerCropRegion[3]);
6736
6737 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6738 }
6739
6740 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6741 LOGD("sensorExpTime = %lld", *sensorExpTime);
6742 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6743 }
6744
6745 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6746 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6747 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6748 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6749 }
6750
6751 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6752 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6753 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6754 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6755 sensorRollingShutterSkew, 1);
6756 }
6757
6758 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6759 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6760 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6761
6762 //calculate the noise profile based on sensitivity
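        // The Android noise model approximates per-pixel variance as S * x + O for a
        // normalized pixel value x; the same (S, O) pair, derived from the reported
        // sensitivity, is replicated for every color channel below.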
6763 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6764 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6765 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6766 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6767 noise_profile[i] = noise_profile_S;
6768 noise_profile[i+1] = noise_profile_O;
6769 }
6770 LOGD("noise model entry (S, O) is (%f, %f)",
6771 noise_profile_S, noise_profile_O);
6772 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6773 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6774 }
6775
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006776#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006777 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006778 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006779 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006780 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006781 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6782 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6783 }
6784 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006785#endif
6786
Thierry Strudel3d639192016-09-09 11:52:26 -07006787 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6788 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6789 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6790 }
6791
6792 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6793 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6794 *faceDetectMode);
6795 if (NAME_NOT_FOUND != val) {
6796 uint8_t fwk_faceDetectMode = (uint8_t)val;
6797 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6798
6799 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6800 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6801 CAM_INTF_META_FACE_DETECTION, metadata) {
6802 uint8_t numFaces = MIN(
6803 faceDetectionInfo->num_faces_detected, MAX_ROI);
6804 int32_t faceIds[MAX_ROI];
6805 uint8_t faceScores[MAX_ROI];
6806 int32_t faceRectangles[MAX_ROI * 4];
6807 int32_t faceLandmarks[MAX_ROI * 6];
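                    // Each face contributes six landmark values, in ANDROID_STATISTICS_FACE_LANDMARKS
                    // order: left eye (x, y), right eye (x, y), mouth center (x, y).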
6808 size_t j = 0, k = 0;
6809
6810 for (size_t i = 0; i < numFaces; i++) {
6811 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6812                    // Adjust the face boundary from sensor output coordinate system to active
6813 // array coordinate system.
6814 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6815 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6816 rect.width, rect.height);
6817
6818 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6819 faceRectangles+j, -1);
6820
6821 j+= 4;
6822 }
6823 if (numFaces <= 0) {
6824 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6825 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6826 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6827 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6828 }
6829
6830 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6831 numFaces);
6832 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6833 faceRectangles, numFaces * 4U);
6834 if (fwk_faceDetectMode ==
6835 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6836 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6837 CAM_INTF_META_FACE_LANDMARK, metadata) {
6838
6839 for (size_t i = 0; i < numFaces; i++) {
6840                            // Map the coordinates from the sensor output coordinate system to the active
6841 // array coordinate system.
6842 mCropRegionMapper.toActiveArray(
6843 landmarks->face_landmarks[i].left_eye_center.x,
6844 landmarks->face_landmarks[i].left_eye_center.y);
6845 mCropRegionMapper.toActiveArray(
6846 landmarks->face_landmarks[i].right_eye_center.x,
6847 landmarks->face_landmarks[i].right_eye_center.y);
6848 mCropRegionMapper.toActiveArray(
6849 landmarks->face_landmarks[i].mouth_center.x,
6850 landmarks->face_landmarks[i].mouth_center.y);
6851
6852 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006853 k+= TOTAL_LANDMARK_INDICES;
6854 }
6855 } else {
6856 for (size_t i = 0; i < numFaces; i++) {
6857 setInvalidLandmarks(faceLandmarks+k);
6858 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006859 }
6860 }
6861
6862 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6863 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6864 faceLandmarks, numFaces * 6U);
6865 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006866 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6867 CAM_INTF_META_FACE_BLINK, metadata) {
6868 uint8_t detected[MAX_ROI];
6869 uint8_t degree[MAX_ROI * 2];
6870 for (size_t i = 0; i < numFaces; i++) {
6871 detected[i] = blinks->blink[i].blink_detected;
6872 degree[2 * i] = blinks->blink[i].left_blink;
6873 degree[2 * i + 1] = blinks->blink[i].right_blink;
6874 }
6875 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6876 detected, numFaces);
6877 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6878 degree, numFaces * 2);
6879 }
6880 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6881 CAM_INTF_META_FACE_SMILE, metadata) {
6882 uint8_t degree[MAX_ROI];
6883 uint8_t confidence[MAX_ROI];
6884 for (size_t i = 0; i < numFaces; i++) {
6885 degree[i] = smiles->smile[i].smile_degree;
6886 confidence[i] = smiles->smile[i].smile_confidence;
6887 }
6888 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6889 degree, numFaces);
6890 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6891 confidence, numFaces);
6892 }
6893 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6894 CAM_INTF_META_FACE_GAZE, metadata) {
6895 int8_t angle[MAX_ROI];
6896 int32_t direction[MAX_ROI * 3];
6897 int8_t degree[MAX_ROI * 2];
6898 for (size_t i = 0; i < numFaces; i++) {
6899 angle[i] = gazes->gaze[i].gaze_angle;
6900 direction[3 * i] = gazes->gaze[i].updown_dir;
6901 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6902 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6903 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6904 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6905 }
6906 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6907 (uint8_t *)angle, numFaces);
6908 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6909 direction, numFaces * 3);
6910 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6911 (uint8_t *)degree, numFaces * 2);
6912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006913 }
6914 }
6915 }
6916 }
6917
6918 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6919 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006920 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006921
Thierry Strudel54dc9782017-02-15 12:12:10 -08006922 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006923 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6924 // process histogram statistics info
Thierry Strudel54dc9782017-02-15 12:12:10 -08006925 uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
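                // hist_buf holds four planes of CAM_HISTOGRAM_STATS_SIZE bins each, filled below
                // in R, Gb, Gr, B order; when the stats carry only a single channel, that
                // channel's histogram is replicated into all four planes.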
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006926 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006927 cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006928 memset(&rHistData, 0, sizeof(rHistData));
Thierry Strudel54dc9782017-02-15 12:12:10 -08006929 memset(&grHistData, 0, sizeof(grHistData));
6930 memset(&gbHistData, 0, sizeof(gbHistData));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006931 memset(&bHistData, 0, sizeof(bHistData));
6932
6933 switch (stats_data->type) {
6934 case CAM_HISTOGRAM_TYPE_BAYER:
6935 switch (stats_data->bayer_stats.data_type) {
6936 case CAM_STATS_CHANNEL_GR:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006937 rHistData = grHistData = gbHistData = bHistData =
6938 stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006939 break;
6940 case CAM_STATS_CHANNEL_GB:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006941 rHistData = grHistData = gbHistData = bHistData =
6942 stats_data->bayer_stats.gb_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006943 break;
6944 case CAM_STATS_CHANNEL_B:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006945 rHistData = grHistData = gbHistData = bHistData =
6946 stats_data->bayer_stats.b_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006947 break;
6948 case CAM_STATS_CHANNEL_ALL:
6949 rHistData = stats_data->bayer_stats.r_stats;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006950 gbHistData = stats_data->bayer_stats.gb_stats;
6951 grHistData = stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006952 bHistData = stats_data->bayer_stats.b_stats;
6953 break;
6954 case CAM_STATS_CHANNEL_Y:
6955 case CAM_STATS_CHANNEL_R:
6956 default:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006957 rHistData = grHistData = gbHistData = bHistData =
6958 stats_data->bayer_stats.r_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006959 break;
6960 }
6961 break;
6962 case CAM_HISTOGRAM_TYPE_YUV:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006963 rHistData = grHistData = gbHistData = bHistData =
6964 stats_data->yuv_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006965 break;
6966 }
6967
6968 memcpy(hist_buf, rHistData.hist_buf, hist_size);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006969 memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6970 memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6971 memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006972
Thierry Strudel54dc9782017-02-15 12:12:10 -08006973 camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006974 }
6975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006976 }
6977
6978 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6979 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6980 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6981 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6982 }
6983
6984 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6985 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6986 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6987 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6988 }
6989
6990 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6991 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6992 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6993 CAM_MAX_SHADING_MAP_HEIGHT);
6994 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6995 CAM_MAX_SHADING_MAP_WIDTH);
6996 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6997 lensShadingMap->lens_shading, 4U * map_width * map_height);
6998 }
6999
7000 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7001 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7002 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7003 }
7004
7005 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7006 //Populate CAM_INTF_META_TONEMAP_CURVES
7007 /* ch0 = G, ch 1 = B, ch 2 = R*/
7008 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7009 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7010 tonemap->tonemap_points_cnt,
7011 CAM_MAX_TONEMAP_CURVE_SIZE);
7012 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7013 }
7014
7015 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7016 &tonemap->curves[0].tonemap_points[0][0],
7017 tonemap->tonemap_points_cnt * 2);
7018
7019 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7020 &tonemap->curves[1].tonemap_points[0][0],
7021 tonemap->tonemap_points_cnt * 2);
7022
7023 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7024 &tonemap->curves[2].tonemap_points[0][0],
7025 tonemap->tonemap_points_cnt * 2);
7026 }
7027
7028 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7029 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7030 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7031 CC_GAIN_MAX);
7032 }
7033
7034 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7035 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7036 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7037 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7038 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7039 }
7040
7041 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7042 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7043 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7044 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7045 toneCurve->tonemap_points_cnt,
7046 CAM_MAX_TONEMAP_CURVE_SIZE);
7047 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7048 }
7049 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7050 (float*)toneCurve->curve.tonemap_points,
7051 toneCurve->tonemap_points_cnt * 2);
7052 }
7053
7054 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7055 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7056 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7057 predColorCorrectionGains->gains, 4);
7058 }
7059
7060 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7061 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7062 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7063 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7064 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7065 }
7066
7067 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7068 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7069 }
7070
7071 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7072 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7073 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7074 }
7075
7076 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7077 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7078 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7079 }
7080
7081 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7082 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7083 *effectMode);
7084 if (NAME_NOT_FOUND != val) {
7085 uint8_t fwk_effectMode = (uint8_t)val;
7086 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7087 }
7088 }
7089
7090 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7091 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7092 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7093 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7094 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7095 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7096 }
7097 int32_t fwk_testPatternData[4];
7098 fwk_testPatternData[0] = testPatternData->r;
7099 fwk_testPatternData[3] = testPatternData->b;
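        // ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Geven, Godd, B]; map the sensor's
        // Gr/Gb channels into the even/odd green slots according to the CFA arrangement.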
7100 switch (gCamCapability[mCameraId]->color_arrangement) {
7101 case CAM_FILTER_ARRANGEMENT_RGGB:
7102 case CAM_FILTER_ARRANGEMENT_GRBG:
7103 fwk_testPatternData[1] = testPatternData->gr;
7104 fwk_testPatternData[2] = testPatternData->gb;
7105 break;
7106 case CAM_FILTER_ARRANGEMENT_GBRG:
7107 case CAM_FILTER_ARRANGEMENT_BGGR:
7108 fwk_testPatternData[2] = testPatternData->gr;
7109 fwk_testPatternData[1] = testPatternData->gb;
7110 break;
7111 default:
7112 LOGE("color arrangement %d is not supported",
7113 gCamCapability[mCameraId]->color_arrangement);
7114 break;
7115 }
7116 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7117 }
7118
7119 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7120 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7121 }
7122
7123 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7124 String8 str((const char *)gps_methods);
7125 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7126 }
7127
7128 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7129 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7130 }
7131
7132 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7133 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7134 }
7135
7136 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7137 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7138 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7139 }
7140
7141 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7142 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7143 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7144 }
7145
7146 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7147 int32_t fwk_thumb_size[2];
7148 fwk_thumb_size[0] = thumb_size->width;
7149 fwk_thumb_size[1] = thumb_size->height;
7150 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7151 }
7152
7153 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7154 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7155 privateData,
7156 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7157 }
7158
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007159 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007160 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007161 meteringMode, 1);
7162 }
7163
Thierry Strudel54dc9782017-02-15 12:12:10 -08007164 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7165 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7166 LOGD("hdr_scene_data: %d %f\n",
7167 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7168 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7169 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7170 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7171 &isHdr, 1);
7172 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7173 &isHdrConfidence, 1);
7174 }
7175
7176
7177
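    // The tuning blob is packed as a fixed header of six uint32 fields (data version followed
    // by the sensor, VFE, CPP, CAC and mod3 payload sizes) and then the variable-length sensor,
    // VFE, CPP and CAC payloads, published through QCAMERA3_TUNING_META_DATA_BLOB.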
Thierry Strudel3d639192016-09-09 11:52:26 -07007178 if (metadata->is_tuning_params_valid) {
7179 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7180 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7181 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7182
7183
7184 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7185 sizeof(uint32_t));
7186 data += sizeof(uint32_t);
7187
7188 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7189 sizeof(uint32_t));
7190 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7191 data += sizeof(uint32_t);
7192
7193 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7194 sizeof(uint32_t));
7195 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7196 data += sizeof(uint32_t);
7197
7198 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7199 sizeof(uint32_t));
7200 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7201 data += sizeof(uint32_t);
7202
7203 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7204 sizeof(uint32_t));
7205 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7206 data += sizeof(uint32_t);
7207
7208 metadata->tuning_params.tuning_mod3_data_size = 0;
7209 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7210 sizeof(uint32_t));
7211 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7212 data += sizeof(uint32_t);
7213
7214 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7215 TUNING_SENSOR_DATA_MAX);
7216 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7217 count);
7218 data += count;
7219
7220 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7221 TUNING_VFE_DATA_MAX);
7222 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7223 count);
7224 data += count;
7225
7226 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7227 TUNING_CPP_DATA_MAX);
7228 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7229 count);
7230 data += count;
7231
7232 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7233 TUNING_CAC_DATA_MAX);
7234 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7235 count);
7236 data += count;
7237
7238 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7239 (int32_t *)(void *)tuning_meta_data_blob,
7240 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7241 }
7242
7243 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7244 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7245 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7246 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7247 NEUTRAL_COL_POINTS);
7248 }
7249
7250 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7251 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7252 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7253 }
7254
7255 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7256 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7257 // Adjust crop region from sensor output coordinate system to active
7258 // array coordinate system.
7259 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7260 hAeRegions->rect.width, hAeRegions->rect.height);
7261
7262 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7263 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7264 REGIONS_TUPLE_COUNT);
7265 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7266 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7267 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7268 hAeRegions->rect.height);
7269 }
7270
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007271 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7272 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7273 if (NAME_NOT_FOUND != val) {
7274 uint8_t fwkAfMode = (uint8_t)val;
7275 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7276 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7277 } else {
7278 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7279 val);
7280 }
7281 }
7282
Thierry Strudel3d639192016-09-09 11:52:26 -07007283 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7284 uint8_t fwk_afState = (uint8_t) *afState;
7285 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007286 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007287 }
7288
7289 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7290 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7291 }
7292
7293 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7294 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7295 }
7296
7297 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7298 uint8_t fwk_lensState = *lensState;
7299 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7300 }
7301
7302 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7303 /*af regions*/
7304 int32_t afRegions[REGIONS_TUPLE_COUNT];
7305 // Adjust crop region from sensor output coordinate system to active
7306 // array coordinate system.
7307 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7308 hAfRegions->rect.width, hAfRegions->rect.height);
7309
7310 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7311 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7312 REGIONS_TUPLE_COUNT);
7313 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7314 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7315 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7316 hAfRegions->rect.height);
7317 }
7318
7319 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007320 uint32_t ab_mode = *hal_ab_mode;
7321 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7322 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7323 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7324 }
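        // The framework antibanding enum only defines OFF, 50HZ, 60HZ and AUTO, so the
        // vendor-specific AUTO_50HZ/AUTO_60HZ modes are reported as plain AUTO.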
Thierry Strudel3d639192016-09-09 11:52:26 -07007325 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007326 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007327 if (NAME_NOT_FOUND != val) {
7328 uint8_t fwk_ab_mode = (uint8_t)val;
7329 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7330 }
7331 }
7332
7333 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7334 int val = lookupFwkName(SCENE_MODES_MAP,
7335 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7336 if (NAME_NOT_FOUND != val) {
7337 uint8_t fwkBestshotMode = (uint8_t)val;
7338 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7339 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7340 } else {
7341 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7342 }
7343 }
7344
7345 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7346 uint8_t fwk_mode = (uint8_t) *mode;
7347 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7348 }
7349
7350    /* Constant metadata values to be updated */
7351 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7352 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7353
7354 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7355 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7356
7357 int32_t hotPixelMap[2];
7358 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7359
7360 // CDS
7361 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7362 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7363 }
7364
Thierry Strudel04e026f2016-10-10 11:27:36 -07007365 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7366 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007367 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007368 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7369 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7370 } else {
7371 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7372 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007373
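        // Track sensor HDR on/off transitions against the cached feature state so that
        // toggles are logged as PROFILE_META_HDR_TOGGLED markers.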
7374 if(fwk_hdr != curr_hdr_state) {
7375 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7376 if(fwk_hdr)
7377 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7378 else
7379 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7380 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007381 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7382 }
7383
Thierry Strudel54dc9782017-02-15 12:12:10 -08007384 //binning correction
7385 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7386 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7387 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7388 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7389 }
7390
Thierry Strudel04e026f2016-10-10 11:27:36 -07007391 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007392 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007393 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7394 int8_t is_ir_on = 0;
7395
7396 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7397 if(is_ir_on != curr_ir_state) {
7398 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7399 if(is_ir_on)
7400 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7401 else
7402 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7403 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007404 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007405 }
7406
Thierry Strudel269c81a2016-10-12 12:13:59 -07007407 // AEC SPEED
7408 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7409 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7410 }
7411
7412 // AWB SPEED
7413 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7414 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7415 }
7416
Thierry Strudel3d639192016-09-09 11:52:26 -07007417 // TNR
7418 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7419 uint8_t tnr_enable = tnr->denoise_enable;
7420 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007421 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7422 int8_t is_tnr_on = 0;
7423
7424 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7425 if(is_tnr_on != curr_tnr_state) {
7426 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7427 if(is_tnr_on)
7428 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7429 else
7430 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7431 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007432
7433 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7434 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7435 }
7436
7437 // Reprocess crop data
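    // Crop and ROI-map info is forwarded only for the stream that can be reprocessed; when the
    // HAL has already reprocessed internally (pprocDone), the full input stream dimensions are
    // reported instead of the daemon-provided crop.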
7438 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7439 uint8_t cnt = crop_data->num_of_streams;
7440 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7441 // mm-qcamera-daemon only posts crop_data for streams
7442            // not linked to pproc, so the absence of valid crop metadata is
7443            // not necessarily an error.
7444 LOGD("No valid crop metadata entries");
7445 } else {
7446 uint32_t reproc_stream_id;
7447 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7448 LOGD("No reprocessible stream found, ignore crop data");
7449 } else {
7450 int rc = NO_ERROR;
7451 Vector<int32_t> roi_map;
7452 int32_t *crop = new int32_t[cnt*4];
7453 if (NULL == crop) {
7454 rc = NO_MEMORY;
7455 }
7456 if (NO_ERROR == rc) {
7457 int32_t streams_found = 0;
7458 for (size_t i = 0; i < cnt; i++) {
7459 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7460 if (pprocDone) {
7461 // HAL already does internal reprocessing,
7462 // either via reprocessing before JPEG encoding,
7463 // or offline postprocessing for pproc bypass case.
7464 crop[0] = 0;
7465 crop[1] = 0;
7466 crop[2] = mInputStreamInfo.dim.width;
7467 crop[3] = mInputStreamInfo.dim.height;
7468 } else {
7469 crop[0] = crop_data->crop_info[i].crop.left;
7470 crop[1] = crop_data->crop_info[i].crop.top;
7471 crop[2] = crop_data->crop_info[i].crop.width;
7472 crop[3] = crop_data->crop_info[i].crop.height;
7473 }
7474 roi_map.add(crop_data->crop_info[i].roi_map.left);
7475 roi_map.add(crop_data->crop_info[i].roi_map.top);
7476 roi_map.add(crop_data->crop_info[i].roi_map.width);
7477 roi_map.add(crop_data->crop_info[i].roi_map.height);
7478 streams_found++;
7479 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7480 crop[0], crop[1], crop[2], crop[3]);
7481 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7482 crop_data->crop_info[i].roi_map.left,
7483 crop_data->crop_info[i].roi_map.top,
7484 crop_data->crop_info[i].roi_map.width,
7485 crop_data->crop_info[i].roi_map.height);
7486 break;
7487
7488 }
7489 }
7490 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7491 &streams_found, 1);
7492 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7493 crop, (size_t)(streams_found * 4));
7494 if (roi_map.array()) {
7495 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7496 roi_map.array(), roi_map.size());
7497 }
7498 }
7499 if (crop) {
7500 delete [] crop;
7501 }
7502 }
7503 }
7504 }
7505
7506 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7507        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7508        // so hardcode the CAC result to OFF mode.
7509 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7510 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7511 } else {
7512 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7513 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7514 *cacMode);
7515 if (NAME_NOT_FOUND != val) {
7516 uint8_t resultCacMode = (uint8_t)val;
7517                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7518                // If not, report the CAC mode that came in the corresponding request.
7519 if (fwk_cacMode != resultCacMode) {
7520 resultCacMode = fwk_cacMode;
7521 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007522 //Check if CAC is disabled by property
7523 if (m_cacModeDisabled) {
7524 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7525 }
7526
Thierry Strudel3d639192016-09-09 11:52:26 -07007527 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7528 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7529 } else {
7530 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7531 }
7532 }
7533 }
7534
7535 // Post blob of cam_cds_data through vendor tag.
7536 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7537 uint8_t cnt = cdsInfo->num_of_streams;
7538 cam_cds_data_t cdsDataOverride;
7539 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7540 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7541 cdsDataOverride.num_of_streams = 1;
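        // Only the CDS state of the reprocessible output stream is reported; the override blob
        // always advertises exactly one stream entry.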
7542 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7543 uint32_t reproc_stream_id;
7544 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7545 LOGD("No reprocessible stream found, ignore cds data");
7546 } else {
7547 for (size_t i = 0; i < cnt; i++) {
7548 if (cdsInfo->cds_info[i].stream_id ==
7549 reproc_stream_id) {
7550 cdsDataOverride.cds_info[0].cds_enable =
7551 cdsInfo->cds_info[i].cds_enable;
7552 break;
7553 }
7554 }
7555 }
7556 } else {
7557 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7558 }
7559 camMetadata.update(QCAMERA3_CDS_INFO,
7560 (uint8_t *)&cdsDataOverride,
7561 sizeof(cam_cds_data_t));
7562 }
7563
7564 // Ldaf calibration data
7565 if (!mLdafCalibExist) {
7566 IF_META_AVAILABLE(uint32_t, ldafCalib,
7567 CAM_INTF_META_LDAF_EXIF, metadata) {
7568 mLdafCalibExist = true;
7569 mLdafCalib[0] = ldafCalib[0];
7570 mLdafCalib[1] = ldafCalib[1];
7571 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7572 ldafCalib[0], ldafCalib[1]);
7573 }
7574 }
7575
Thierry Strudel54dc9782017-02-15 12:12:10 -08007576 // EXIF debug data through vendor tag
7577 /*
7578 * Mobicat Mask can assume 3 values:
7579 * 1 refers to Mobicat data,
7580 * 2 refers to Stats Debug and Exif Debug Data
7581 * 3 refers to Mobicat and Stats Debug Data
7582 * We want to make sure that we are sending Exif debug data
7583 * only when Mobicat Mask is 2.
7584 */
7585 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7586 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7587 (uint8_t *)(void *)mExifParams.debug_params,
7588 sizeof(mm_jpeg_debug_exif_params_t));
7589 }
7590
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007591 // Reprocess and DDM debug data through vendor tag
7592 cam_reprocess_info_t repro_info;
7593 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007594 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7595 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007596 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007597 }
7598 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7599 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007600 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007601 }
7602 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7603 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007604 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007605 }
7606 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7607 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007608 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 }
7610 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7611 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007612 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007613 }
7614 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007615 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007616 }
7617 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7618 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007619 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007620 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007621 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7622 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7623 }
7624 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7625 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7626 }
7627 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7628 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007629
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007630 // INSTANT AEC MODE
7631 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7632 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7633 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7634 }
7635
Shuzhen Wange763e802016-03-31 10:24:29 -07007636 // AF scene change
7637 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7638 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7639 }
7640
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007641 /* In batch mode, cache the first metadata in the batch */
7642 if (mBatchSize && firstMetadataInBatch) {
7643 mCachedMetadata.clear();
7644 mCachedMetadata = camMetadata;
7645 }
7646
Thierry Strudel3d639192016-09-09 11:52:26 -07007647 resultMetadata = camMetadata.release();
7648 return resultMetadata;
7649}
7650
7651/*===========================================================================
7652 * FUNCTION : saveExifParams
7653 *
7654 * DESCRIPTION: Cache 3A and EXIF debug parameters from the metadata callback into mExifParams.
7655 *
7656 * PARAMETERS :
7657 * @metadata : metadata information from callback
7658 *
7659 * RETURN : none
7660 *
7661 *==========================================================================*/
7662void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7663{
7664 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7665 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7666 if (mExifParams.debug_params) {
7667 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7668 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7669 }
7670 }
7671 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7672 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7673 if (mExifParams.debug_params) {
7674 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7675 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7676 }
7677 }
7678 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7679 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7680 if (mExifParams.debug_params) {
7681 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7682 mExifParams.debug_params->af_debug_params_valid = TRUE;
7683 }
7684 }
7685 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7686 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7687 if (mExifParams.debug_params) {
7688 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7689 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7690 }
7691 }
7692 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7693 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7694 if (mExifParams.debug_params) {
7695 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7696 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7697 }
7698 }
7699 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7700 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7701 if (mExifParams.debug_params) {
7702 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7703 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7704 }
7705 }
7706 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7707 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7708 if (mExifParams.debug_params) {
7709 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7710 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7711 }
7712 }
7713 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7714 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7715 if (mExifParams.debug_params) {
7716 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7717 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7718 }
7719 }
7720}
7721
7722/*===========================================================================
7723 * FUNCTION : get3AExifParams
7724 *
7725 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data.
7726 *
7727 * PARAMETERS : none
7728 *
7729 *
7730 * RETURN : mm_jpeg_exif_params_t
7731 *
7732 *==========================================================================*/
7733mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7734{
7735 return mExifParams;
7736}
7737
7738/*===========================================================================
7739 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7740 *
7741 * DESCRIPTION: Translate urgent (partial result) metadata from the HAL callback into
7742 *              framework result metadata.
7742 *
7743 * PARAMETERS :
7744 * @metadata : metadata information from callback
7745 *
7746 * RETURN : camera_metadata_t*
7747 * metadata in a format specified by fwk
7748 *==========================================================================*/
7749camera_metadata_t*
7750QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7751 (metadata_buffer_t *metadata)
7752{
7753 CameraMetadata camMetadata;
7754 camera_metadata_t *resultMetadata;
7755
7756
7757 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7758 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7759 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7760 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7761 }
7762
7763 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7764 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7765 &aecTrigger->trigger, 1);
7766 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7767 &aecTrigger->trigger_id, 1);
7768 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7769 aecTrigger->trigger);
7770 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7771 aecTrigger->trigger_id);
7772 }
7773
7774 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7775 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7776 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7777 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7778 }
7779
Thierry Strudel3d639192016-09-09 11:52:26 -07007780 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7781 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7782 &af_trigger->trigger, 1);
7783 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7784 af_trigger->trigger);
7785 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7786 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7787 af_trigger->trigger_id);
7788 }
7789
7790 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7791 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7792 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7793 if (NAME_NOT_FOUND != val) {
7794 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7795 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7796 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7797 } else {
7798 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7799 }
7800 }
7801
7802 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7803 uint32_t aeMode = CAM_AE_MODE_MAX;
7804 int32_t flashMode = CAM_FLASH_MODE_MAX;
7805 int32_t redeye = -1;
7806 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7807 aeMode = *pAeMode;
7808 }
7809 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7810 flashMode = *pFlashMode;
7811 }
7812 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7813 redeye = *pRedeye;
7814 }
7815
7816 if (1 == redeye) {
7817 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7818 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7819 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7820 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7821 flashMode);
7822 if (NAME_NOT_FOUND != val) {
7823 fwk_aeMode = (uint8_t)val;
7824 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7825 } else {
7826 LOGE("Unsupported flash mode %d", flashMode);
7827 }
7828 } else if (aeMode == CAM_AE_MODE_ON) {
7829 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7830 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7831 } else if (aeMode == CAM_AE_MODE_OFF) {
7832 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7833 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7834 } else {
7835 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7836 "flashMode:%d, aeMode:%u!!!",
7837 redeye, flashMode, aeMode);
7838 }
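    // While instant AEC is active, count frames and clear the mode (flagging a reset) once AEC
    // reports it has settled or the display-skip frame bound is exceeded.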
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007839 if (mInstantAEC) {
7840        // Increment frame index count until a bound is reached for instant AEC.
7841 mInstantAecFrameIdxCount++;
7842 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7843 CAM_INTF_META_AEC_INFO, metadata) {
7844 LOGH("ae_params->settled = %d",ae_params->settled);
7845            // If AEC has settled, or the number of frames has reached the bound value,
7846            // reset instant AEC.
7847 if (ae_params->settled ||
7848 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7849 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7850 mInstantAEC = false;
7851 mResetInstantAEC = true;
7852 mInstantAecFrameIdxCount = 0;
7853 }
7854 }
7855 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007856 resultMetadata = camMetadata.release();
7857 return resultMetadata;
7858}
7859
7860/*===========================================================================
7861 * FUNCTION : dumpMetadataToFile
7862 *
7863 * DESCRIPTION: Dumps tuning metadata to file system
7864 *
7865 * PARAMETERS :
7866 * @meta : tuning metadata
7867 * @dumpFrameCount : current dump frame count
7868 * @enabled : Enable mask
7869 *
7870 *==========================================================================*/
7871void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7872 uint32_t &dumpFrameCount,
7873 bool enabled,
7874 const char *type,
7875 uint32_t frameNumber)
7876{
7877 //Some sanity checks
7878 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7879 LOGE("Tuning sensor data size bigger than expected %d: %d",
7880 meta.tuning_sensor_data_size,
7881 TUNING_SENSOR_DATA_MAX);
7882 return;
7883 }
7884
7885 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7886 LOGE("Tuning VFE data size bigger than expected %d: %d",
7887 meta.tuning_vfe_data_size,
7888 TUNING_VFE_DATA_MAX);
7889 return;
7890 }
7891
7892 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7893 LOGE("Tuning CPP data size bigger than expected %d: %d",
7894 meta.tuning_cpp_data_size,
7895 TUNING_CPP_DATA_MAX);
7896 return;
7897 }
7898
7899 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7900 LOGE("Tuning CAC data size bigger than expected %d: %d",
7901 meta.tuning_cac_data_size,
7902 TUNING_CAC_DATA_MAX);
7903 return;
7904 }
7905 //
7906
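    // The dump file mirrors the tuning blob layout: data version, the five payload size fields,
    // and then the sensor, VFE, CPP and CAC payloads, written under QCAMERA_DUMP_FRM_LOCATION
    // with a timestamped "<count>m_<type>_<frame>.bin" file name.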
7907 if(enabled){
7908 char timeBuf[FILENAME_MAX];
7909 char buf[FILENAME_MAX];
7910 memset(buf, 0, sizeof(buf));
7911 memset(timeBuf, 0, sizeof(timeBuf));
7912 time_t current_time;
7913 struct tm * timeinfo;
7914 time (&current_time);
7915 timeinfo = localtime (&current_time);
7916 if (timeinfo != NULL) {
7917 strftime (timeBuf, sizeof(timeBuf),
7918 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7919 }
7920 String8 filePath(timeBuf);
7921 snprintf(buf,
7922 sizeof(buf),
7923 "%dm_%s_%d.bin",
7924 dumpFrameCount,
7925 type,
7926 frameNumber);
7927 filePath.append(buf);
7928 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7929 if (file_fd >= 0) {
7930 ssize_t written_len = 0;
7931 meta.tuning_data_version = TUNING_DATA_VERSION;
7932 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7933 written_len += write(file_fd, data, sizeof(uint32_t));
7934 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7935 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7936 written_len += write(file_fd, data, sizeof(uint32_t));
7937 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7938 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7939 written_len += write(file_fd, data, sizeof(uint32_t));
7940 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7941 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7942 written_len += write(file_fd, data, sizeof(uint32_t));
7943 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7944 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7945 written_len += write(file_fd, data, sizeof(uint32_t));
7946 meta.tuning_mod3_data_size = 0;
7947 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7948 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7949 written_len += write(file_fd, data, sizeof(uint32_t));
7950 size_t total_size = meta.tuning_sensor_data_size;
7951 data = (void *)((uint8_t *)&meta.data);
7952 written_len += write(file_fd, data, total_size);
7953 total_size = meta.tuning_vfe_data_size;
7954 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7955 written_len += write(file_fd, data, total_size);
7956 total_size = meta.tuning_cpp_data_size;
7957 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7958 written_len += write(file_fd, data, total_size);
7959 total_size = meta.tuning_cac_data_size;
7960 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7961 written_len += write(file_fd, data, total_size);
7962 close(file_fd);
7963 }else {
7964 LOGE("fail to open file for metadata dumping");
7965 }
7966 }
7967}
7968
7969/*===========================================================================
7970 * FUNCTION : cleanAndSortStreamInfo
7971 *
7972 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7973 * and sort them such that raw stream is at the end of the list
7974 * This is a workaround for camera daemon constraint.
7975 *
7976 * PARAMETERS : None
7977 *
7978 *==========================================================================*/
7979void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7980{
7981 List<stream_info_t *> newStreamInfo;
7982
7983 /*clean up invalid streams*/
7984 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7985 it != mStreamInfo.end();) {
7986 if(((*it)->status) == INVALID){
7987 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7988 delete channel;
7989 free(*it);
7990 it = mStreamInfo.erase(it);
7991 } else {
7992 it++;
7993 }
7994 }
7995
7996 // Move preview/video/callback/snapshot streams into newList
7997 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7998 it != mStreamInfo.end();) {
7999 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8000 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8001 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8002 newStreamInfo.push_back(*it);
8003 it = mStreamInfo.erase(it);
8004 } else
8005 it++;
8006 }
8007 // Move raw streams into newList
8008 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8009 it != mStreamInfo.end();) {
8010 newStreamInfo.push_back(*it);
8011 it = mStreamInfo.erase(it);
8012 }
8013
8014 mStreamInfo = newStreamInfo;
8015}
8016
8017/*===========================================================================
8018 * FUNCTION : extractJpegMetadata
8019 *
8020 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8021 * JPEG metadata is cached in HAL, and return as part of capture
8022 * result when metadata is returned from camera daemon.
8023 *
8024 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8025 * @request: capture request
8026 *
8027 *==========================================================================*/
8028void QCamera3HardwareInterface::extractJpegMetadata(
8029 CameraMetadata& jpegMetadata,
8030 const camera3_capture_request_t *request)
8031{
8032 CameraMetadata frame_settings;
8033 frame_settings = request->settings;
8034
8035 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8036 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8037 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8038 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8039
8040 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8041 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8042 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8043 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8044
8045 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8046 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8047 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8048 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8049
8050 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8051 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8052 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8053 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8054
8055 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8056 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8057 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8058 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8059
8060 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8061 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8062 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8063 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8064
8065 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8066 int32_t thumbnail_size[2];
8067 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8068 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8069 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8070 int32_t orientation =
8071 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008072 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8074 int32_t temp;
8075 temp = thumbnail_size[0];
8076 thumbnail_size[0] = thumbnail_size[1];
8077 thumbnail_size[1] = temp;
8078 }
8079 }
8080 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8081 thumbnail_size,
8082 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8083 }
8084
8085}
8086
8087/*===========================================================================
8088 * FUNCTION : convertToRegions
8089 *
8090 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8091 *
8092 * PARAMETERS :
8093 * @rect : cam_rect_t struct to convert
8094 * @region : int32_t destination array
8095 * @weight : if we are converting from cam_area_t, weight is valid
8096 * else weight = -1
8097 *
8098 *==========================================================================*/
8099void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8100 int32_t *region, int weight)
8101{
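    // Example (values for illustration only): rect {left=100, top=200,
    // width=300, height=400} with weight 1 becomes
    // region = [100, 200, 400, 600, 1], i.e. [xmin, ymin, xmax, ymax, weight].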
8102 region[0] = rect.left;
8103 region[1] = rect.top;
8104 region[2] = rect.left + rect.width;
8105 region[3] = rect.top + rect.height;
8106 if (weight > -1) {
8107 region[4] = weight;
8108 }
8109}
8110
8111/*===========================================================================
8112 * FUNCTION : convertFromRegions
8113 *
 8114 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 8115 *
 8116 * PARAMETERS :
 8117 *   @roi : cam_area_t destination struct to fill in
 8118 *   @frame_settings : capture request settings containing the region tag
 8119 *   @tag : metadata tag whose data is laid out as
 8120 *          [xmin, ymin, xmax, ymax, weight]
8121 *
8122 *==========================================================================*/
8123void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008124 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008125{
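    // Inverse of convertToRegions(): the tag's int32 data is expected to hold
    // [xmin, ymin, xmax, ymax, weight], which is converted back into a
    // left/top/width/height rectangle plus weight.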
Thierry Strudel3d639192016-09-09 11:52:26 -07008126 int32_t x_min = frame_settings.find(tag).data.i32[0];
8127 int32_t y_min = frame_settings.find(tag).data.i32[1];
8128 int32_t x_max = frame_settings.find(tag).data.i32[2];
8129 int32_t y_max = frame_settings.find(tag).data.i32[3];
8130 roi.weight = frame_settings.find(tag).data.i32[4];
8131 roi.rect.left = x_min;
8132 roi.rect.top = y_min;
8133 roi.rect.width = x_max - x_min;
8134 roi.rect.height = y_max - y_min;
8135}
8136
8137/*===========================================================================
8138 * FUNCTION : resetIfNeededROI
8139 *
8140 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8141 * crop region
8142 *
8143 * PARAMETERS :
8144 * @roi : cam_area_t struct to resize
8145 * @scalerCropRegion : cam_crop_region_t region to compare against
8146 *
8147 *
8148 *==========================================================================*/
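// Illustration (hypothetical values): with scalerCropRegion = {0, 0, 1920, 1080}
// and roi->rect = {1800, 1000, 300, 200}, the roi is clamped to
// {1800, 1000, 120, 80} and the function returns true.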
8149bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8150 const cam_crop_region_t* scalerCropRegion)
8151{
8152 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8153 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8154 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8155 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8156
 8157    /* According to the spec, weight = 0 indicates that the roi should be
 8158     * disabled. Without this check, the validation below (which verifies that
 8159     * the roi lies inside the scaler crop region) would fail, the roi would
 8160     * not be reset, and the algorithm would keep using a stale roi window.
 8161     */
8162 if (roi->weight == 0) {
8163 return true;
8164 }
8165
8166 if ((roi_x_max < scalerCropRegion->left) ||
 8167        // right edge of roi window is left of scaler crop's left edge
8168 (roi_y_max < scalerCropRegion->top) ||
 8169        // bottom edge of roi window is above scaler crop's top edge
8170 (roi->rect.left > crop_x_max) ||
 8171        // left edge of roi window is beyond (right of) scaler crop's right edge
8172 (roi->rect.top > crop_y_max)){
 8173        // top edge of roi window is below scaler crop's bottom edge
8174 return false;
8175 }
8176 if (roi->rect.left < scalerCropRegion->left) {
8177 roi->rect.left = scalerCropRegion->left;
8178 }
8179 if (roi->rect.top < scalerCropRegion->top) {
8180 roi->rect.top = scalerCropRegion->top;
8181 }
8182 if (roi_x_max > crop_x_max) {
8183 roi_x_max = crop_x_max;
8184 }
8185 if (roi_y_max > crop_y_max) {
8186 roi_y_max = crop_y_max;
8187 }
8188 roi->rect.width = roi_x_max - roi->rect.left;
8189 roi->rect.height = roi_y_max - roi->rect.top;
8190 return true;
8191}
8192
8193/*===========================================================================
8194 * FUNCTION : convertLandmarks
8195 *
8196 * DESCRIPTION: helper method to extract the landmarks from face detection info
8197 *
8198 * PARAMETERS :
8199 * @landmark_data : input landmark data to be converted
8200 * @landmarks : int32_t destination array
8201 *
8202 *
8203 *==========================================================================*/
8204void QCamera3HardwareInterface::convertLandmarks(
8205 cam_face_landmarks_info_t landmark_data,
8206 int32_t *landmarks)
8207{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008208 if (landmark_data.is_left_eye_valid) {
8209 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8210 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8211 } else {
8212 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8213 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8214 }
8215
8216 if (landmark_data.is_right_eye_valid) {
8217 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8218 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8219 } else {
8220 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8221 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8222 }
8223
8224 if (landmark_data.is_mouth_valid) {
8225 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8226 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8227 } else {
8228 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8229 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8230 }
8231}
8232
8233/*===========================================================================
8234 * FUNCTION : setInvalidLandmarks
8235 *
8236 * DESCRIPTION: helper method to set invalid landmarks
8237 *
8238 * PARAMETERS :
8239 * @landmarks : int32_t destination array
8240 *
8241 *
8242 *==========================================================================*/
8243void QCamera3HardwareInterface::setInvalidLandmarks(
8244 int32_t *landmarks)
8245{
8246 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8247 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8248 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8249 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8250 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8251 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008252}
8253
8254#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008255
8256/*===========================================================================
8257 * FUNCTION : getCapabilities
8258 *
8259 * DESCRIPTION: query camera capability from back-end
8260 *
8261 * PARAMETERS :
8262 * @ops : mm-interface ops structure
8263 * @cam_handle : camera handle for which we need capability
8264 *
8265 * RETURN : ptr type of capability structure
8266 * capability for success
8267 * NULL for failure
8268 *==========================================================================*/
8269cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8270 uint32_t cam_handle)
8271{
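    // Flow (summarizing the code below): allocate a heap buffer, map it to the
    // backend as the capability buffer, have the backend fill it via
    // query_capability(), copy the result into a malloc'd cam_capability_t,
    // then unmap and release the heap buffer regardless of success.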
8272 int rc = NO_ERROR;
8273 QCamera3HeapMemory *capabilityHeap = NULL;
8274 cam_capability_t *cap_ptr = NULL;
8275
8276 if (ops == NULL) {
8277 LOGE("Invalid arguments");
8278 return NULL;
8279 }
8280
8281 capabilityHeap = new QCamera3HeapMemory(1);
8282 if (capabilityHeap == NULL) {
8283 LOGE("creation of capabilityHeap failed");
8284 return NULL;
8285 }
8286
8287 /* Allocate memory for capability buffer */
8288 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8289 if(rc != OK) {
8290 LOGE("No memory for cappability");
8291 goto allocate_failed;
8292 }
8293
 8294    /* Clear the capability buffer, then map it for the backend to fill */
8295 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8296
8297 rc = ops->map_buf(cam_handle,
8298 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8299 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8300 if(rc < 0) {
8301 LOGE("failed to map capability buffer");
8302 rc = FAILED_TRANSACTION;
8303 goto map_failed;
8304 }
8305
8306 /* Query Capability */
8307 rc = ops->query_capability(cam_handle);
8308 if(rc < 0) {
8309 LOGE("failed to query capability");
8310 rc = FAILED_TRANSACTION;
8311 goto query_failed;
8312 }
8313
8314 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8315 if (cap_ptr == NULL) {
8316 LOGE("out of memory");
8317 rc = NO_MEMORY;
8318 goto query_failed;
8319 }
8320
8321 memset(cap_ptr, 0, sizeof(cam_capability_t));
8322 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8323
8324 int index;
8325 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8326 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8327 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8328 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8329 }
8330
8331query_failed:
8332 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8333map_failed:
8334 capabilityHeap->deallocate();
8335allocate_failed:
8336 delete capabilityHeap;
8337
8338 if (rc != NO_ERROR) {
8339 return NULL;
8340 } else {
8341 return cap_ptr;
8342 }
8343}
8344
Thierry Strudel3d639192016-09-09 11:52:26 -07008345/*===========================================================================
8346 * FUNCTION : initCapabilities
8347 *
8348 * DESCRIPTION: initialize camera capabilities in static data struct
8349 *
8350 * PARAMETERS :
8351 * @cameraId : camera Id
8352 *
8353 * RETURN : int32_t type of status
8354 * NO_ERROR -- success
8355 * none-zero failure code
8356 *==========================================================================*/
8357int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8358{
8359 int rc = 0;
8360 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008361 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008362
8363 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8364 if (rc) {
8365 LOGE("camera_open failed. rc = %d", rc);
8366 goto open_failed;
8367 }
8368 if (!cameraHandle) {
8369 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8370 goto open_failed;
8371 }
8372
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008373 handle = get_main_camera_handle(cameraHandle->camera_handle);
8374 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8375 if (gCamCapability[cameraId] == NULL) {
8376 rc = FAILED_TRANSACTION;
8377 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 }
8379
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008380 gCamCapability[cameraId]->camera_index = cameraId;
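    // For dual-camera sensors, also query the aux camera's capability and keep
    // a private copy of the main capability, so both remain reachable through
    // gCamCapability[cameraId].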
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008381 if (is_dual_camera_by_idx(cameraId)) {
8382 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8383 gCamCapability[cameraId]->aux_cam_cap =
8384 getCapabilities(cameraHandle->ops, handle);
8385 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8386 rc = FAILED_TRANSACTION;
8387 free(gCamCapability[cameraId]);
8388 goto failed_op;
8389 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008390
8391 // Copy the main camera capability to main_cam_cap struct
8392 gCamCapability[cameraId]->main_cam_cap =
8393 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8394 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8395 LOGE("out of memory");
8396 rc = NO_MEMORY;
8397 goto failed_op;
8398 }
8399 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8400 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008401 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008402failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008403 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8404 cameraHandle = NULL;
8405open_failed:
8406 return rc;
8407}
8408
8409/*==========================================================================
 8410 * FUNCTION   : get3AVersion
8411 *
8412 * DESCRIPTION: get the Q3A S/W version
8413 *
8414 * PARAMETERS :
8415 * @sw_version: Reference of Q3A structure which will hold version info upon
8416 * return
8417 *
8418 * RETURN : None
8419 *
8420 *==========================================================================*/
8421void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8422{
8423 if(gCamCapability[mCameraId])
8424 sw_version = gCamCapability[mCameraId]->q3a_version;
8425 else
8426 LOGE("Capability structure NULL!");
8427}
8428
8429
8430/*===========================================================================
8431 * FUNCTION : initParameters
8432 *
8433 * DESCRIPTION: initialize camera parameters
8434 *
8435 * PARAMETERS :
8436 *
8437 * RETURN : int32_t type of status
8438 * NO_ERROR -- success
 8439 *              non-zero failure code
8440 *==========================================================================*/
8441int QCamera3HardwareInterface::initParameters()
8442{
8443 int rc = 0;
8444
8445 //Allocate Set Param Buffer
8446 mParamHeap = new QCamera3HeapMemory(1);
8447 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8448 if(rc != OK) {
8449 rc = NO_MEMORY;
8450 LOGE("Failed to allocate SETPARM Heap memory");
8451 delete mParamHeap;
8452 mParamHeap = NULL;
8453 return rc;
8454 }
8455
8456 //Map memory for parameters buffer
8457 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8458 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8459 mParamHeap->getFd(0),
8460 sizeof(metadata_buffer_t),
8461 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8462 if(rc < 0) {
8463 LOGE("failed to map SETPARM buffer");
8464 rc = FAILED_TRANSACTION;
8465 mParamHeap->deallocate();
8466 delete mParamHeap;
8467 mParamHeap = NULL;
8468 return rc;
8469 }
8470
8471 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8472
8473 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8474 return rc;
8475}
8476
8477/*===========================================================================
8478 * FUNCTION : deinitParameters
8479 *
8480 * DESCRIPTION: de-initialize camera parameters
8481 *
8482 * PARAMETERS :
8483 *
8484 * RETURN : NONE
8485 *==========================================================================*/
8486void QCamera3HardwareInterface::deinitParameters()
8487{
8488 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8489 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8490
8491 mParamHeap->deallocate();
8492 delete mParamHeap;
8493 mParamHeap = NULL;
8494
8495 mParameters = NULL;
8496
8497 free(mPrevParameters);
8498 mPrevParameters = NULL;
8499}
8500
8501/*===========================================================================
8502 * FUNCTION : calcMaxJpegSize
8503 *
8504 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8505 *
8506 * PARAMETERS :
 8507 *   @camera_id : camera Id
8508 * RETURN : max_jpeg_size
8509 *==========================================================================*/
8510size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8511{
8512 size_t max_jpeg_size = 0;
8513 size_t temp_width, temp_height;
8514 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8515 MAX_SIZES_CNT);
8516 for (size_t i = 0; i < count; i++) {
8517 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8518 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8519 if (temp_width * temp_height > max_jpeg_size ) {
8520 max_jpeg_size = temp_width * temp_height;
8521 }
8522 }
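    // Worst-case JPEG buffer size: 1.5 bytes per pixel of the largest picture
    // size (presumably sized to the uncompressed YUV420 footprint), plus room
    // for the camera3_jpeg_blob_t header appended at the end of the buffer.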
8523 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8524 return max_jpeg_size;
8525}
8526
8527/*===========================================================================
8528 * FUNCTION : getMaxRawSize
8529 *
8530 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8531 *
8532 * PARAMETERS :
 8533 *   @camera_id : camera Id
8534 * RETURN : Largest supported Raw Dimension
8535 *==========================================================================*/
8536cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8537{
8538 int max_width = 0;
8539 cam_dimension_t maxRawSize;
8540
8541 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8542 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8543 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8544 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8545 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8546 }
8547 }
8548 return maxRawSize;
8549}
8550
8551
8552/*===========================================================================
8553 * FUNCTION : calcMaxJpegDim
8554 *
8555 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8556 *
8557 * PARAMETERS :
8558 *
8559 * RETURN : max_jpeg_dim
8560 *==========================================================================*/
8561cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8562{
8563 cam_dimension_t max_jpeg_dim;
8564 cam_dimension_t curr_jpeg_dim;
8565 max_jpeg_dim.width = 0;
8566 max_jpeg_dim.height = 0;
8567 curr_jpeg_dim.width = 0;
8568 curr_jpeg_dim.height = 0;
8569 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8570 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8571 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8572 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8573 max_jpeg_dim.width * max_jpeg_dim.height ) {
8574 max_jpeg_dim.width = curr_jpeg_dim.width;
8575 max_jpeg_dim.height = curr_jpeg_dim.height;
8576 }
8577 }
8578 return max_jpeg_dim;
8579}
8580
8581/*===========================================================================
8582 * FUNCTION : addStreamConfig
8583 *
8584 * DESCRIPTION: adds the stream configuration to the array
8585 *
8586 * PARAMETERS :
8587 * @available_stream_configs : pointer to stream configuration array
8588 * @scalar_format : scalar format
8589 * @dim : configuration dimension
8590 * @config_type : input or output configuration type
8591 *
8592 * RETURN : NONE
8593 *==========================================================================*/
8594void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8595 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8596{
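    // Each stream configuration entry is a (format, width, height,
    // input/output direction) quadruple appended to the flat array.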
8597 available_stream_configs.add(scalar_format);
8598 available_stream_configs.add(dim.width);
8599 available_stream_configs.add(dim.height);
8600 available_stream_configs.add(config_type);
8601}
8602
8603/*===========================================================================
 8604 * FUNCTION   : supportBurstCapture
8605 *
8606 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8607 *
8608 * PARAMETERS :
8609 * @cameraId : camera Id
8610 *
8611 * RETURN : true if camera supports BURST_CAPTURE
8612 * false otherwise
8613 *==========================================================================*/
8614bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8615{
8616 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8617 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8618 const int32_t highResWidth = 3264;
8619 const int32_t highResHeight = 2448;
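    // 3264x2448 (~8MP) is the "high resolution" reference size used below; the
    // checks require at least 10 fps at full resolution, plus at least 20 fps
    // at this reference size (or at full resolution itself), before
    // BURST_CAPTURE is advertised.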
8620
8621 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8622 // Maximum resolution images cannot be captured at >= 10fps
8623 // -> not supporting BURST_CAPTURE
8624 return false;
8625 }
8626
8627 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8628 // Maximum resolution images can be captured at >= 20fps
8629 // --> supporting BURST_CAPTURE
8630 return true;
8631 }
8632
8633 // Find the smallest highRes resolution, or largest resolution if there is none
8634 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8635 MAX_SIZES_CNT);
8636 size_t highRes = 0;
8637 while ((highRes + 1 < totalCnt) &&
8638 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8639 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8640 highResWidth * highResHeight)) {
8641 highRes++;
8642 }
8643 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8644 return true;
8645 } else {
8646 return false;
8647 }
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : initStaticMetadata
8652 *
8653 * DESCRIPTION: initialize the static metadata
8654 *
8655 * PARAMETERS :
8656 * @cameraId : camera Id
8657 *
8658 * RETURN : int32_t type of status
8659 * 0 -- success
8660 * non-zero failure code
8661 *==========================================================================*/
8662int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8663{
8664 int rc = 0;
8665 CameraMetadata staticInfo;
8666 size_t count = 0;
8667 bool limitedDevice = false;
8668 char prop[PROPERTY_VALUE_MAX];
8669 bool supportBurst = false;
8670
8671 supportBurst = supportBurstCapture(cameraId);
8672
 8673    /* If the sensor is a YUV sensor (no raw support), or per-frame control is not
 8674     * guaranteed, or the min fps at max resolution is less than 20 fps, the device
 8675     * is advertised as a LIMITED device */
8676 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8677 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8678 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8679 !supportBurst;
8680
8681 uint8_t supportedHwLvl = limitedDevice ?
8682 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008683#ifndef USE_HAL_3_3
8684 // LEVEL_3 - This device will support level 3.
8685 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8686#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008687 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008688#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008689
8690 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8691 &supportedHwLvl, 1);
8692
8693 bool facingBack = false;
8694 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8695 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8696 facingBack = true;
8697 }
8698 /*HAL 3 only*/
8699 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8700 &gCamCapability[cameraId]->min_focus_distance, 1);
8701
8702 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8703 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8704
8705 /*should be using focal lengths but sensor doesn't provide that info now*/
8706 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8707 &gCamCapability[cameraId]->focal_length,
8708 1);
8709
8710 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8711 gCamCapability[cameraId]->apertures,
8712 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8713
8714 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8715 gCamCapability[cameraId]->filter_densities,
8716 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8717
8718
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008719 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8720 size_t mode_count =
8721 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8722 for (size_t i = 0; i < mode_count; i++) {
8723 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8724 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008725 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008726 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008727
8728 int32_t lens_shading_map_size[] = {
8729 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8730 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8731 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8732 lens_shading_map_size,
8733 sizeof(lens_shading_map_size)/sizeof(int32_t));
8734
8735 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8736 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8737
8738 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8739 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8740
8741 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8742 &gCamCapability[cameraId]->max_frame_duration, 1);
8743
8744 camera_metadata_rational baseGainFactor = {
8745 gCamCapability[cameraId]->base_gain_factor.numerator,
8746 gCamCapability[cameraId]->base_gain_factor.denominator};
8747 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8748 &baseGainFactor, 1);
8749
8750 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8751 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8752
8753 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8754 gCamCapability[cameraId]->pixel_array_size.height};
8755 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8756 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8757
8758 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8759 gCamCapability[cameraId]->active_array_size.top,
8760 gCamCapability[cameraId]->active_array_size.width,
8761 gCamCapability[cameraId]->active_array_size.height};
8762 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8763 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8764
8765 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8766 &gCamCapability[cameraId]->white_level, 1);
8767
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008768 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8769 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8770 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008771 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008772 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008773
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008774#ifndef USE_HAL_3_3
8775 bool hasBlackRegions = false;
8776 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8777 LOGW("black_region_count: %d is bounded to %d",
8778 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8779 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8780 }
8781 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8782 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8783 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8784 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8785 }
8786 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8787 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8788 hasBlackRegions = true;
8789 }
8790#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008791 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8792 &gCamCapability[cameraId]->flash_charge_duration, 1);
8793
8794 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8795 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8796
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008797 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8798 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8799 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008800 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8801 &timestampSource, 1);
8802
Thierry Strudel54dc9782017-02-15 12:12:10 -08008803 //update histogram vendor data
8804 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008805 &gCamCapability[cameraId]->histogram_size, 1);
8806
Thierry Strudel54dc9782017-02-15 12:12:10 -08008807 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008808 &gCamCapability[cameraId]->max_histogram_count, 1);
8809
8810 int32_t sharpness_map_size[] = {
8811 gCamCapability[cameraId]->sharpness_map_size.width,
8812 gCamCapability[cameraId]->sharpness_map_size.height};
8813
8814 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8815 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8816
8817 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8818 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8819
8820 int32_t scalar_formats[] = {
8821 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8822 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8823 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8824 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8825 HAL_PIXEL_FORMAT_RAW10,
8826 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8827 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8828 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8829 scalar_formats,
8830 scalar_formats_count);
8831
8832 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8833 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8834 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8835 count, MAX_SIZES_CNT, available_processed_sizes);
8836 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8837 available_processed_sizes, count * 2);
8838
8839 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8840 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8841 makeTable(gCamCapability[cameraId]->raw_dim,
8842 count, MAX_SIZES_CNT, available_raw_sizes);
8843 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8844 available_raw_sizes, count * 2);
8845
8846 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8847 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8848 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8849 count, MAX_SIZES_CNT, available_fps_ranges);
8850 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8851 available_fps_ranges, count * 2);
8852
8853 camera_metadata_rational exposureCompensationStep = {
8854 gCamCapability[cameraId]->exp_compensation_step.numerator,
8855 gCamCapability[cameraId]->exp_compensation_step.denominator};
8856 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8857 &exposureCompensationStep, 1);
8858
8859 Vector<uint8_t> availableVstabModes;
8860 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8861 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008862 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008863 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008864 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008865 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008866 count = IS_TYPE_MAX;
8867 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8868 for (size_t i = 0; i < count; i++) {
8869 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8870 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8871 eisSupported = true;
8872 break;
8873 }
8874 }
8875 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008876 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8877 }
8878 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8879 availableVstabModes.array(), availableVstabModes.size());
8880
8881 /*HAL 1 and HAL 3 common*/
8882 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8883 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8884 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8885 float maxZoom = maxZoomStep/minZoomStep;
8886 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8887 &maxZoom, 1);
8888
8889 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8890 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8891
8892 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8893 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8894 max3aRegions[2] = 0; /* AF not supported */
8895 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8896 max3aRegions, 3);
8897
8898 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8899 memset(prop, 0, sizeof(prop));
8900 property_get("persist.camera.facedetect", prop, "1");
8901 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8902 LOGD("Support face detection mode: %d",
8903 supportedFaceDetectMode);
8904
8905 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008906    /* supported mode should be OFF if the max number of faces is 0 */
8907 if (maxFaces <= 0) {
8908 supportedFaceDetectMode = 0;
8909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008910 Vector<uint8_t> availableFaceDetectModes;
8911 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8912 if (supportedFaceDetectMode == 1) {
8913 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8914 } else if (supportedFaceDetectMode == 2) {
8915 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8916 } else if (supportedFaceDetectMode == 3) {
8917 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8918 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8919 } else {
8920 maxFaces = 0;
8921 }
8922 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8923 availableFaceDetectModes.array(),
8924 availableFaceDetectModes.size());
8925 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8926 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008927 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8928 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8929 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008930
Emilian Peev7650c122017-01-19 08:24:33 -08008931#ifdef SUPPORT_DEPTH_DATA
Emilian Peev0ce959f2017-03-07 16:49:49 +00008932 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8933 //TODO: Update depth size accordingly, currently we use active array
8934 // as reference.
8935 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8936 int32_t depthHeight =
8937 gCamCapability[cameraId]->active_array_size.height;
 8938        //As per the spec, the depth point cloud sample count should be (width * height) / 16
8939 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8940 assert(0 < depthSamplesCount);
8941 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
8942 &depthSamplesCount, 1);
Emilian Peev7650c122017-01-19 08:24:33 -08008943
Emilian Peev0ce959f2017-03-07 16:49:49 +00008944 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8945 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8946 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8947 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008948
Emilian Peev0ce959f2017-03-07 16:49:49 +00008949 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8950 1, 1 };
8951 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8952 depthMinDuration,
8953 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008954
Emilian Peev0ce959f2017-03-07 16:49:49 +00008955 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB,
8956 depthSamplesCount, 1, 0 };
8957 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8958 depthStallDuration,
8959 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008960
Emilian Peev0ce959f2017-03-07 16:49:49 +00008961 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8962 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8963 }
Emilian Peev7650c122017-01-19 08:24:33 -08008964#endif
8965
Thierry Strudel3d639192016-09-09 11:52:26 -07008966 int32_t exposureCompensationRange[] = {
8967 gCamCapability[cameraId]->exposure_compensation_min,
8968 gCamCapability[cameraId]->exposure_compensation_max};
8969 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8970 exposureCompensationRange,
8971 sizeof(exposureCompensationRange)/sizeof(int32_t));
8972
8973 uint8_t lensFacing = (facingBack) ?
8974 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8975 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8976
8977 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8978 available_thumbnail_sizes,
8979 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8980
8981 /*all sizes will be clubbed into this tag*/
8982 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8983 /*android.scaler.availableStreamConfigurations*/
8984 Vector<int32_t> available_stream_configs;
8985 cam_dimension_t active_array_dim;
8986 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8987 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08008988
 8989    /*advertise the list of supported input dimensions based on the property below.
 8990      By default, all sizes up to 5MP will be advertised.
8991 Note that the setprop resolution format should be WxH.
8992 e.g: adb shell setprop persist.camera.input.minsize 1280x720
8993 To list all supported sizes, setprop needs to be set with "0x0" */
8994 cam_dimension_t minInputSize = {2592,1944}; //5MP
8995 memset(prop, 0, sizeof(prop));
8996 property_get("persist.camera.input.minsize", prop, "2592x1944");
8997 if (strlen(prop) > 0) {
8998 char *saveptr = NULL;
8999 char *token = strtok_r(prop, "x", &saveptr);
9000 if (token != NULL) {
9001 minInputSize.width = atoi(token);
9002 }
9003 token = strtok_r(NULL, "x", &saveptr);
9004 if (token != NULL) {
9005 minInputSize.height = atoi(token);
9006 }
9007 }
9008
Thierry Strudel3d639192016-09-09 11:52:26 -07009009 /* Add input/output stream configurations for each scalar formats*/
9010 for (size_t j = 0; j < scalar_formats_count; j++) {
9011 switch (scalar_formats[j]) {
9012 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9013 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9014 case HAL_PIXEL_FORMAT_RAW10:
9015 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9016 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9017 addStreamConfig(available_stream_configs, scalar_formats[j],
9018 gCamCapability[cameraId]->raw_dim[i],
9019 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9020 }
9021 break;
9022 case HAL_PIXEL_FORMAT_BLOB:
9023 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9024 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9025 addStreamConfig(available_stream_configs, scalar_formats[j],
9026 gCamCapability[cameraId]->picture_sizes_tbl[i],
9027 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9028 }
9029 break;
9030 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9031 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9032 default:
9033 cam_dimension_t largest_picture_size;
9034 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9035 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9036 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9037 addStreamConfig(available_stream_configs, scalar_formats[j],
9038 gCamCapability[cameraId]->picture_sizes_tbl[i],
9039 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009040                /* For the two formats below we also support input streams for reprocessing; advertise those */
9041 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9042 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9043 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9044 >= minInputSize.width) || (gCamCapability[cameraId]->
9045 picture_sizes_tbl[i].height >= minInputSize.height)) {
9046 addStreamConfig(available_stream_configs, scalar_formats[j],
9047 gCamCapability[cameraId]->picture_sizes_tbl[i],
9048 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9049 }
9050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009051 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009052
Thierry Strudel3d639192016-09-09 11:52:26 -07009053 break;
9054 }
9055 }
9056
9057 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9058 available_stream_configs.array(), available_stream_configs.size());
9059 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9060 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9061
9062 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9063 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9064
9065 /* android.scaler.availableMinFrameDurations */
9066 Vector<int64_t> available_min_durations;
9067 for (size_t j = 0; j < scalar_formats_count; j++) {
9068 switch (scalar_formats[j]) {
9069 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9070 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9071 case HAL_PIXEL_FORMAT_RAW10:
9072 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9073 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9074 available_min_durations.add(scalar_formats[j]);
9075 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9076 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9077 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9078 }
9079 break;
9080 default:
9081 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9082 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9083 available_min_durations.add(scalar_formats[j]);
9084 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9085 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9086 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9087 }
9088 break;
9089 }
9090 }
9091 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9092 available_min_durations.array(), available_min_durations.size());
9093
9094 Vector<int32_t> available_hfr_configs;
9095 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9096 int32_t fps = 0;
9097 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9098 case CAM_HFR_MODE_60FPS:
9099 fps = 60;
9100 break;
9101 case CAM_HFR_MODE_90FPS:
9102 fps = 90;
9103 break;
9104 case CAM_HFR_MODE_120FPS:
9105 fps = 120;
9106 break;
9107 case CAM_HFR_MODE_150FPS:
9108 fps = 150;
9109 break;
9110 case CAM_HFR_MODE_180FPS:
9111 fps = 180;
9112 break;
9113 case CAM_HFR_MODE_210FPS:
9114 fps = 210;
9115 break;
9116 case CAM_HFR_MODE_240FPS:
9117 fps = 240;
9118 break;
9119 case CAM_HFR_MODE_480FPS:
9120 fps = 480;
9121 break;
9122 case CAM_HFR_MODE_OFF:
9123 case CAM_HFR_MODE_MAX:
9124 default:
9125 break;
9126 }
9127
9128 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9129 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9130 /* For each HFR frame rate, need to advertise one variable fps range
9131 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9132 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9133 * set by the app. When video recording is started, [120, 120] is
9134 * set. This way sensor configuration does not change when recording
9135 * is started */
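            // For example, assuming PREVIEW_FPS_FOR_HFR is 30, a 1920x1080@120
            // HFR entry would produce (1920, 1080, 30, 120, 4) followed by
            // (1920, 1080, 120, 120, 4).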
9136
9137 /* (width, height, fps_min, fps_max, batch_size_max) */
9138 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9139 j < MAX_SIZES_CNT; j++) {
9140 available_hfr_configs.add(
9141 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9142 available_hfr_configs.add(
9143 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9144 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9145 available_hfr_configs.add(fps);
9146 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9147
9148 /* (width, height, fps_min, fps_max, batch_size_max) */
9149 available_hfr_configs.add(
9150 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9151 available_hfr_configs.add(
9152 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9153 available_hfr_configs.add(fps);
9154 available_hfr_configs.add(fps);
9155 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9156 }
9157 }
9158 }
9159 //Advertise HFR capability only if the property is set
9160 memset(prop, 0, sizeof(prop));
9161 property_get("persist.camera.hal3hfr.enable", prop, "1");
9162 uint8_t hfrEnable = (uint8_t)atoi(prop);
9163
9164 if(hfrEnable && available_hfr_configs.array()) {
9165 staticInfo.update(
9166 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9167 available_hfr_configs.array(), available_hfr_configs.size());
9168 }
9169
9170 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9171 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9172 &max_jpeg_size, 1);
9173
9174 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9175 size_t size = 0;
9176 count = CAM_EFFECT_MODE_MAX;
9177 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9178 for (size_t i = 0; i < count; i++) {
9179 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9180 gCamCapability[cameraId]->supported_effects[i]);
9181 if (NAME_NOT_FOUND != val) {
9182 avail_effects[size] = (uint8_t)val;
9183 size++;
9184 }
9185 }
9186 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9187 avail_effects,
9188 size);
9189
9190 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9191 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9192 size_t supported_scene_modes_cnt = 0;
9193 count = CAM_SCENE_MODE_MAX;
9194 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9195 for (size_t i = 0; i < count; i++) {
9196 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9197 CAM_SCENE_MODE_OFF) {
9198 int val = lookupFwkName(SCENE_MODES_MAP,
9199 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9200 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009201
Thierry Strudel3d639192016-09-09 11:52:26 -07009202 if (NAME_NOT_FOUND != val) {
9203 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9204 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9205 supported_scene_modes_cnt++;
9206 }
9207 }
9208 }
9209 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9210 avail_scene_modes,
9211 supported_scene_modes_cnt);
9212
9213 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9214 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9215 supported_scene_modes_cnt,
9216 CAM_SCENE_MODE_MAX,
9217 scene_mode_overrides,
9218 supported_indexes,
9219 cameraId);
9220
9221 if (supported_scene_modes_cnt == 0) {
9222 supported_scene_modes_cnt = 1;
9223 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9224 }
9225
9226 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9227 scene_mode_overrides, supported_scene_modes_cnt * 3);
9228
9229 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9230 ANDROID_CONTROL_MODE_AUTO,
9231 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9232 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9233 available_control_modes,
9234 3);
9235
9236 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9237 size = 0;
9238 count = CAM_ANTIBANDING_MODE_MAX;
9239 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9240 for (size_t i = 0; i < count; i++) {
9241 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9242 gCamCapability[cameraId]->supported_antibandings[i]);
9243 if (NAME_NOT_FOUND != val) {
9244 avail_antibanding_modes[size] = (uint8_t)val;
9245 size++;
9246 }
9247
9248 }
9249 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9250 avail_antibanding_modes,
9251 size);
9252
9253 uint8_t avail_abberation_modes[] = {
9254 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9255 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9256 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9257 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9258 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9259 if (0 == count) {
 9260        // If no aberration correction modes are available for a device, advertise only the OFF mode
9261 size = 1;
9262 } else {
 9263        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
 9264        // so advertise all 3 modes whenever any one mode is supported, as per the
 9265        // M requirement
9266 size = 3;
9267 }
9268 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9269 avail_abberation_modes,
9270 size);
9271
9272 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9273 size = 0;
9274 count = CAM_FOCUS_MODE_MAX;
9275 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9276 for (size_t i = 0; i < count; i++) {
9277 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9278 gCamCapability[cameraId]->supported_focus_modes[i]);
9279 if (NAME_NOT_FOUND != val) {
9280 avail_af_modes[size] = (uint8_t)val;
9281 size++;
9282 }
9283 }
9284 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9285 avail_af_modes,
9286 size);
9287
9288 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9289 size = 0;
9290 count = CAM_WB_MODE_MAX;
9291 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9292 for (size_t i = 0; i < count; i++) {
9293 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9294 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9295 gCamCapability[cameraId]->supported_white_balances[i]);
9296 if (NAME_NOT_FOUND != val) {
9297 avail_awb_modes[size] = (uint8_t)val;
9298 size++;
9299 }
9300 }
9301 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9302 avail_awb_modes,
9303 size);
9304
9305 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9306 count = CAM_FLASH_FIRING_LEVEL_MAX;
9307 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9308 count);
9309 for (size_t i = 0; i < count; i++) {
9310 available_flash_levels[i] =
9311 gCamCapability[cameraId]->supported_firing_levels[i];
9312 }
9313 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9314 available_flash_levels, count);
9315
9316 uint8_t flashAvailable;
9317 if (gCamCapability[cameraId]->flash_available)
9318 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9319 else
9320 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9321 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9322 &flashAvailable, 1);
9323
9324 Vector<uint8_t> avail_ae_modes;
9325 count = CAM_AE_MODE_MAX;
9326 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9327 for (size_t i = 0; i < count; i++) {
9328 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9329 }
9330 if (flashAvailable) {
9331 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9332 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009333 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009334 }
9335 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9336 avail_ae_modes.array(),
9337 avail_ae_modes.size());
9338
9339 int32_t sensitivity_range[2];
9340 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9341 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9342 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9343 sensitivity_range,
9344 sizeof(sensitivity_range) / sizeof(int32_t));
9345
9346 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9347 &gCamCapability[cameraId]->max_analog_sensitivity,
9348 1);
9349
9350 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9351 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9352 &sensor_orientation,
9353 1);
9354
9355 int32_t max_output_streams[] = {
9356 MAX_STALLING_STREAMS,
9357 MAX_PROCESSED_STREAMS,
9358 MAX_RAW_STREAMS};
9359 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9360 max_output_streams,
9361 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9362
9363 uint8_t avail_leds = 0;
9364 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9365 &avail_leds, 0);
9366
9367 uint8_t focus_dist_calibrated;
9368 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9369 gCamCapability[cameraId]->focus_dist_calibrated);
9370 if (NAME_NOT_FOUND != val) {
9371 focus_dist_calibrated = (uint8_t)val;
9372 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9373 &focus_dist_calibrated, 1);
9374 }
9375
9376 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9377 size = 0;
9378 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9379 MAX_TEST_PATTERN_CNT);
9380 for (size_t i = 0; i < count; i++) {
9381 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9382 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9383 if (NAME_NOT_FOUND != testpatternMode) {
9384 avail_testpattern_modes[size] = testpatternMode;
9385 size++;
9386 }
9387 }
9388 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9389 avail_testpattern_modes,
9390 size);
9391
9392 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9393 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9394 &max_pipeline_depth,
9395 1);
9396
9397 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9398 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9399 &partial_result_count,
9400 1);
9401
9402 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9403 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9404
9405 Vector<uint8_t> available_capabilities;
9406 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9407 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9408 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9409 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9410 if (supportBurst) {
9411 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9412 }
9413 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9414 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9415 if (hfrEnable && available_hfr_configs.array()) {
9416 available_capabilities.add(
9417 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9418 }
9419
9420 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9421 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9422 }
9423 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9424 available_capabilities.array(),
9425 available_capabilities.size());
9426
9427 //aeLockAvailable is set to true only if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9428 //The assumption is that all Bayer (raw) sensors support MANUAL_SENSOR.
9429 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9430 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9431
9432 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9433 &aeLockAvailable, 1);
9434
9435 //awbLockAvailable is set to true only if the capabilities include MANUAL_POST_PROCESSING or
9436 //BURST_CAPTURE. The assumption is that all Bayer (raw) sensors support MANUAL_POST_PROCESSING.
9437 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9438 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9439
9440 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9441 &awbLockAvailable, 1);
9442
9443 int32_t max_input_streams = 1;
9444 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9445 &max_input_streams,
9446 1);
9447
9448 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9449 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9450 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9451 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9452 HAL_PIXEL_FORMAT_YCbCr_420_888};
9453 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9454 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
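    /* Reading the map above (illustrative only): the first tuple
     * {IMPLEMENTATION_DEFINED, 2, BLOB, YCbCr_420_888} advertises that an opaque
     * (ZSL) input buffer can be reprocessed into either a JPEG or a YUV output;
     * the second tuple advertises the same two outputs for a YUV input. */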
9455
9456 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9457 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9458 &max_latency,
9459 1);
9460
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009461#ifndef USE_HAL_3_3
9462 int32_t isp_sensitivity_range[2];
9463 isp_sensitivity_range[0] =
9464 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9465 isp_sensitivity_range[1] =
9466 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9467 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9468 isp_sensitivity_range,
9469 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9470#endif
9471
Thierry Strudel3d639192016-09-09 11:52:26 -07009472 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9473 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9474 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9475 available_hot_pixel_modes,
9476 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9477
9478 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9479 ANDROID_SHADING_MODE_FAST,
9480 ANDROID_SHADING_MODE_HIGH_QUALITY};
9481 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9482 available_shading_modes,
9483 3);
9484
9485 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9487 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9488 available_lens_shading_map_modes,
9489 2);
9490
9491 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9492 ANDROID_EDGE_MODE_FAST,
9493 ANDROID_EDGE_MODE_HIGH_QUALITY,
9494 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9495 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9496 available_edge_modes,
9497 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9498
9499 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9500 ANDROID_NOISE_REDUCTION_MODE_FAST,
9501 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9502 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9503 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9504 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9505 available_noise_red_modes,
9506 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9507
9508 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9509 ANDROID_TONEMAP_MODE_FAST,
9510 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9511 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9512 available_tonemap_modes,
9513 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9514
9515 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9516 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9517 available_hot_pixel_map_modes,
9518 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9519
9520 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9521 gCamCapability[cameraId]->reference_illuminant1);
9522 if (NAME_NOT_FOUND != val) {
9523 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9524 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9525 }
9526
9527 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9528 gCamCapability[cameraId]->reference_illuminant2);
9529 if (NAME_NOT_FOUND != val) {
9530 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9531 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9532 }
9533
9534 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9535 (void *)gCamCapability[cameraId]->forward_matrix1,
9536 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9537
9538 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9539 (void *)gCamCapability[cameraId]->forward_matrix2,
9540 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9541
9542 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9543 (void *)gCamCapability[cameraId]->color_transform1,
9544 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9545
9546 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9547 (void *)gCamCapability[cameraId]->color_transform2,
9548 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9549
9550 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9551 (void *)gCamCapability[cameraId]->calibration_transform1,
9552 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9553
9554 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9555 (void *)gCamCapability[cameraId]->calibration_transform2,
9556 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9557
9558 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9559 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9560 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9561 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9562 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9563 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9564 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9565 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9566 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9567 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9568 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9569 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9570 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9571 ANDROID_JPEG_GPS_COORDINATES,
9572 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9573 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9574 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9575 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9576 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9577 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9578 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9579 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9580 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9581 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009582#ifndef USE_HAL_3_3
9583 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9584#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009585 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009586 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009587 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9588 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009589 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009590 /* DevCamDebug metadata request_keys_basic */
9591 DEVCAMDEBUG_META_ENABLE,
9592 /* DevCamDebug metadata end */
9593 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009594
9595 size_t request_keys_cnt =
9596 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9597 Vector<int32_t> available_request_keys;
9598 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9599 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9600 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9601 }
9602
9603 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9604 available_request_keys.array(), available_request_keys.size());
9605
9606 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9607 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9608 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9609 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9610 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9611 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9612 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9613 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9614 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9615 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9616 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9617 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9618 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9619 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9620 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9621 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9622 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009623 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009624 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9625 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9626 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009627 ANDROID_STATISTICS_FACE_SCORES,
9628#ifndef USE_HAL_3_3
9629 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9630#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009631 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009632 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009633 // DevCamDebug metadata result_keys_basic
9634 DEVCAMDEBUG_META_ENABLE,
9635 // DevCamDebug metadata result_keys AF
9636 DEVCAMDEBUG_AF_LENS_POSITION,
9637 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9638 DEVCAMDEBUG_AF_TOF_DISTANCE,
9639 DEVCAMDEBUG_AF_LUMA,
9640 DEVCAMDEBUG_AF_HAF_STATE,
9641 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9642 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9643 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9644 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9645 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9646 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9647 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9648 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9649 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9650 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9651 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9652 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9653 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9654 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9655 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9656 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9657 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9658 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9659 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9660 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9661 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9662 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9663 // DevCamDebug metadata result_keys AEC
9664 DEVCAMDEBUG_AEC_TARGET_LUMA,
9665 DEVCAMDEBUG_AEC_COMP_LUMA,
9666 DEVCAMDEBUG_AEC_AVG_LUMA,
9667 DEVCAMDEBUG_AEC_CUR_LUMA,
9668 DEVCAMDEBUG_AEC_LINECOUNT,
9669 DEVCAMDEBUG_AEC_REAL_GAIN,
9670 DEVCAMDEBUG_AEC_EXP_INDEX,
9671 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009672 // DevCamDebug metadata result_keys zzHDR
9673 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9674 DEVCAMDEBUG_AEC_L_LINECOUNT,
9675 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9676 DEVCAMDEBUG_AEC_S_LINECOUNT,
9677 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9678 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9679 // DevCamDebug metadata result_keys ADRC
9680 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9681 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9682 DEVCAMDEBUG_AEC_GTM_RATIO,
9683 DEVCAMDEBUG_AEC_LTM_RATIO,
9684 DEVCAMDEBUG_AEC_LA_RATIO,
9685 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009686 // DevCamDebug metadata result_keys AWB
9687 DEVCAMDEBUG_AWB_R_GAIN,
9688 DEVCAMDEBUG_AWB_G_GAIN,
9689 DEVCAMDEBUG_AWB_B_GAIN,
9690 DEVCAMDEBUG_AWB_CCT,
9691 DEVCAMDEBUG_AWB_DECISION,
9692 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009693 };
9694
Thierry Strudel3d639192016-09-09 11:52:26 -07009695 size_t result_keys_cnt =
9696 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9697
9698 Vector<int32_t> available_result_keys;
9699 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9700 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9701 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9702 }
9703 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9704 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9705 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9706 }
9707 if (supportedFaceDetectMode == 1) {
9708 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9709 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9710 } else if ((supportedFaceDetectMode == 2) ||
9711 (supportedFaceDetectMode == 3)) {
9712 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9713 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9714 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009715#ifndef USE_HAL_3_3
9716 if (hasBlackRegions) {
9717 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9718 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9719 }
9720#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009721 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9722 available_result_keys.array(), available_result_keys.size());
9723
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009724 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009725 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9726 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9727 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9728 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9729 ANDROID_SCALER_CROPPING_TYPE,
9730 ANDROID_SYNC_MAX_LATENCY,
9731 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9732 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9733 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9734 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9735 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9736 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9737 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9738 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9739 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9740 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9741 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9742 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9743 ANDROID_LENS_FACING,
9744 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9745 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9746 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9747 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9748 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9749 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9750 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9751 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9752 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9753 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9754 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9755 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9756 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9757 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9758 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9759 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9760 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9761 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9762 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9763 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009764 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009765 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9766 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9767 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9768 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9769 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9770 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9771 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9772 ANDROID_CONTROL_AVAILABLE_MODES,
9773 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9774 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9775 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9776 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009777 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009778#ifdef SUPPORT_DEPTH_DATA
9779 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9780 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9781 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9782 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9783 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9784#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009785#ifndef USE_HAL_3_3
9786 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9787 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9788#endif
9789 };
9790
9791 Vector<int32_t> available_characteristics_keys;
9792 available_characteristics_keys.appendArray(characteristics_keys_basic,
9793 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9794#ifndef USE_HAL_3_3
9795 if (hasBlackRegions) {
9796 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9797 }
9798#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009799 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009800 available_characteristics_keys.array(),
9801 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009802
9803 /*available stall durations depend on the hw + sw and will be different for different devices */
9804 /*have to add for raw after implementation*/
9805 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9806 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9807
9808 Vector<int64_t> available_stall_durations;
9809 for (uint32_t j = 0; j < stall_formats_count; j++) {
9810 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9811 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9812 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9813 available_stall_durations.add(stall_formats[j]);
9814 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9815 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9816 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9817 }
9818 } else {
9819 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9820 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9821 available_stall_durations.add(stall_formats[j]);
9822 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9823 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9824 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9825 }
9826 }
9827 }
9828 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9829 available_stall_durations.array(),
9830 available_stall_durations.size());
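    /* Each entry above is a (format, width, height, stall_ns) quadruple. As a
     * hypothetical example, {HAL_PIXEL_FORMAT_BLOB, 4208, 3120, 33333333} would
     * advertise roughly a 33 ms extra stall for a full-size JPEG capture. */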
9831
9832 //QCAMERA3_OPAQUE_RAW
9833 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9834 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9835 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9836 case LEGACY_RAW:
9837 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9838 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9839 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9840 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9841 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9842 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9843 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9844 break;
9845 case MIPI_RAW:
9846 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9847 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9848 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9849 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9850 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9851 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9852 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9853 break;
9854 default:
9855 LOGE("unknown opaque_raw_format %d",
9856 gCamCapability[cameraId]->opaque_raw_fmt);
9857 break;
9858 }
9859 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9860
9861 Vector<int32_t> strides;
9862 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9863 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9864 cam_stream_buf_plane_info_t buf_planes;
9865 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9866 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9867 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9868 &gCamCapability[cameraId]->padding_info, &buf_planes);
9869 strides.add(buf_planes.plane_info.mp[0].stride);
9870 }
9871 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9872 strides.size());
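    /* QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of (width, height, stride)
     * triples, one per supported RAW dimension, with the stride taken from
     * mm_stream_calc_offset_raw() for the opaque RAW format chosen above. */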
9873
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009874 //TBD: remove the following line once backend advertises zzHDR in feature mask
9875 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009876 //Video HDR default
9877 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9878 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009879 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009880 int32_t vhdr_mode[] = {
9881 QCAMERA3_VIDEO_HDR_MODE_OFF,
9882 QCAMERA3_VIDEO_HDR_MODE_ON};
9883
9884 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9885 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9886 vhdr_mode, vhdr_mode_count);
9887 }
9888
Thierry Strudel3d639192016-09-09 11:52:26 -07009889 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9890 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9891 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9892
9893 uint8_t isMonoOnly =
9894 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9895 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9896 &isMonoOnly, 1);
9897
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009898#ifndef USE_HAL_3_3
9899 Vector<int32_t> opaque_size;
9900 for (size_t j = 0; j < scalar_formats_count; j++) {
9901 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9902 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9903 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9904 cam_stream_buf_plane_info_t buf_planes;
9905
9906 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9907 &gCamCapability[cameraId]->padding_info, &buf_planes);
9908
9909 if (rc == 0) {
9910 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9911 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9912 opaque_size.add(buf_planes.plane_info.frame_len);
9913 } else {
9914 LOGE("raw frame calculation failed!");
9915 }
9916 }
9917 }
9918 }
9919
9920 if ((opaque_size.size() > 0) &&
9921 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9922 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9923 else
9924 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9925#endif
9926
Thierry Strudel04e026f2016-10-10 11:27:36 -07009927 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9928 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9929 size = 0;
9930 count = CAM_IR_MODE_MAX;
9931 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9932 for (size_t i = 0; i < count; i++) {
9933 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9934 gCamCapability[cameraId]->supported_ir_modes[i]);
9935 if (NAME_NOT_FOUND != val) {
9936 avail_ir_modes[size] = (int32_t)val;
9937 size++;
9938 }
9939 }
9940 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9941 avail_ir_modes, size);
9942 }
9943
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009944 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9945 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9946 size = 0;
9947 count = CAM_AEC_CONVERGENCE_MAX;
9948 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9949 for (size_t i = 0; i < count; i++) {
9950 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9951 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9952 if (NAME_NOT_FOUND != val) {
9953 available_instant_aec_modes[size] = (int32_t)val;
9954 size++;
9955 }
9956 }
9957 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9958 available_instant_aec_modes, size);
9959 }
9960
Thierry Strudel54dc9782017-02-15 12:12:10 -08009961 int32_t sharpness_range[] = {
9962 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9963 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9964 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9965
9966 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9967 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9968 size = 0;
9969 count = CAM_BINNING_CORRECTION_MODE_MAX;
9970 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9971 for (size_t i = 0; i < count; i++) {
9972 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9973 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9974 gCamCapability[cameraId]->supported_binning_modes[i]);
9975 if (NAME_NOT_FOUND != val) {
9976 avail_binning_modes[size] = (int32_t)val;
9977 size++;
9978 }
9979 }
9980 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9981 avail_binning_modes, size);
9982 }
9983
9984 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9985 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9986 size = 0;
9987 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9988 for (size_t i = 0; i < count; i++) {
9989 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9990 gCamCapability[cameraId]->supported_aec_modes[i]);
9991 if (NAME_NOT_FOUND != val)
9992 available_aec_modes[size++] = val;
9993 }
9994 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9995 available_aec_modes, size);
9996 }
9997
9998 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9999 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10000 size = 0;
10001 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10002 for (size_t i = 0; i < count; i++) {
10003 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10004 gCamCapability[cameraId]->supported_iso_modes[i]);
10005 if (NAME_NOT_FOUND != val)
10006 available_iso_modes[size++] = val;
10007 }
10008 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10009 available_iso_modes, size);
10010 }
10011
10012 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10013 for (size_t i = 0; i < count; i++)
10014 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10015 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10016 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10017
10018 int32_t available_saturation_range[4];
10019 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10020 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10021 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10022 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10023 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10024 available_saturation_range, 4);
10025
10026 uint8_t is_hdr_values[2];
10027 is_hdr_values[0] = 0;
10028 is_hdr_values[1] = 1;
10029 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10030 is_hdr_values, 2);
10031
10032 float is_hdr_confidence_range[2];
10033 is_hdr_confidence_range[0] = 0.0;
10034 is_hdr_confidence_range[1] = 1.0;
10035 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10036 is_hdr_confidence_range, 2);
10037
Thierry Strudel3d639192016-09-09 11:52:26 -070010038 gStaticMetadata[cameraId] = staticInfo.release();
10039 return rc;
10040}
10041
10042/*===========================================================================
10043 * FUNCTION : makeTable
10044 *
10045 * DESCRIPTION: flatten a dimension table into {width, height} int32 pairs
10046 *
10047 * PARAMETERS :
10048 *   @dimTable, @size      : source dimension table and its valid entry count
10049 *   @max_size, @sizeTable : maximum entries to copy and the output array
10050 *==========================================================================*/
10051void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10052 size_t max_size, int32_t *sizeTable)
10053{
10054 size_t j = 0;
10055 if (size > max_size) {
10056 size = max_size;
10057 }
10058 for (size_t i = 0; i < size; i++) {
10059 sizeTable[j] = dimTable[i].width;
10060 sizeTable[j+1] = dimTable[i].height;
10061 j+=2;
10062 }
10063}
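/* Usage sketch (illustrative only, not part of the build):
 *   int32_t sizes[MAX_SIZES_CNT * 2];
 *   makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
 *           gCamCapability[cameraId]->picture_sizes_tbl_cnt,
 *           MAX_SIZES_CNT, sizes);
 *   // sizes now holds {w0, h0, w1, h1, ...}, ready for staticInfo.update()
 */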
10064
10065/*===========================================================================
10066 * FUNCTION : makeFPSTable
10067 *
10068 * DESCRIPTION: flatten an fps-range table into {min_fps, max_fps} int32 pairs
10069 *
10070 * PARAMETERS :
10071 *   @fpsTable, @size, @max_size, @fpsRangesTable : source ranges, valid count, copy limit, output array
10072 *==========================================================================*/
10073void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10074 size_t max_size, int32_t *fpsRangesTable)
10075{
10076 size_t j = 0;
10077 if (size > max_size) {
10078 size = max_size;
10079 }
10080 for (size_t i = 0; i < size; i++) {
10081 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10082 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10083 j+=2;
10084 }
10085}
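/* Illustrative example: an fpsTable entry {min_fps = 15.0, max_fps = 30.0}
 * becomes the two consecutive int32 values {15, 30} in fpsRangesTable, the
 * flat layout ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES expects. */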
10086
10087/*===========================================================================
10088 * FUNCTION : makeOverridesList
10089 *
10090 * DESCRIPTION: make a list of scene mode overrides
10091 *
10092 * PARAMETERS :
10093 *   @overridesTable, @size, @max_size : scene mode override table, valid count, copy limit
10094 *   @overridesList, @supported_indexes, @camera_id : output {ae, awb, af} triples, framework scene indexes, camera id
10095 *==========================================================================*/
10096void QCamera3HardwareInterface::makeOverridesList(
10097 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10098 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10099{
10100 /* The daemon provides a list of overrides for all scene modes.
10101 However, we should send the framework only the overrides for the
10102 scene modes it actually supports. */
10103 size_t j = 0;
10104 if (size > max_size) {
10105 size = max_size;
10106 }
10107 size_t focus_count = CAM_FOCUS_MODE_MAX;
10108 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10109 focus_count);
10110 for (size_t i = 0; i < size; i++) {
10111 bool supt = false;
10112 size_t index = supported_indexes[i];
10113 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10114 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10115 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10116 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10117 overridesTable[index].awb_mode);
10118 if (NAME_NOT_FOUND != val) {
10119 overridesList[j+1] = (uint8_t)val;
10120 }
10121 uint8_t focus_override = overridesTable[index].af_mode;
10122 for (size_t k = 0; k < focus_count; k++) {
10123 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10124 supt = true;
10125 break;
10126 }
10127 }
10128 if (supt) {
10129 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10130 focus_override);
10131 if (NAME_NOT_FOUND != val) {
10132 overridesList[j+2] = (uint8_t)val;
10133 }
10134 } else {
10135 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10136 }
10137 j+=3;
10138 }
10139}
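/* Illustrative example: each framework-visible scene mode contributes one
 * {ae, awb, af} triple to overridesList. With flash available, a hypothetical
 * entry could be {ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 * ANDROID_CONTROL_AWB_MODE_AUTO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};
 * a scene whose AF override the sensor can't honor falls back to
 * ANDROID_CONTROL_AF_MODE_OFF. */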
10140
10141/*===========================================================================
10142 * FUNCTION : filterJpegSizes
10143 *
10144 * DESCRIPTION: Returns the supported JPEG sizes, restricted to sizes no smaller
10145 * than the active array dimensions divided by 'downscale_factor'
10146 *
10147 * PARAMETERS :
10148 *
10149 * RETURN : length of jpegSizes array
10150 *==========================================================================*/
10151
10152size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10153 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10154 uint8_t downscale_factor)
10155{
10156 if (0 == downscale_factor) {
10157 downscale_factor = 1;
10158 }
10159
10160 int32_t min_width = active_array_size.width / downscale_factor;
10161 int32_t min_height = active_array_size.height / downscale_factor;
10162 size_t jpegSizesCnt = 0;
10163 if (processedSizesCnt > maxCount) {
10164 processedSizesCnt = maxCount;
10165 }
10166 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10167 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10168 jpegSizes[jpegSizesCnt] = processedSizes[i];
10169 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10170 jpegSizesCnt += 2;
10171 }
10172 }
10173 return jpegSizesCnt;
10174}
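/* Worked example (hypothetical numbers): with a 4208x3120 active array and
 * downscale_factor = 2, the minimum acceptable JPEG size is 2104x1560, so a
 * 1920x1080 processed size is filtered out while 4208x3120 is kept. */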
10175
10176/*===========================================================================
10177 * FUNCTION : computeNoiseModelEntryS
10178 *
10179 * DESCRIPTION: function to map a given sensitivity to the S noise
10180 * model parameters in the DNG noise model.
10181 *
10182 * PARAMETERS : sens : the sensor sensitivity
10183 *
10184 * RETURN : S (sensor amplification) noise
10185 *
10186 *==========================================================================*/
10187double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10188 double s = gCamCapability[mCameraId]->gradient_S * sens +
10189 gCamCapability[mCameraId]->offset_S;
10190 return ((s < 0.0) ? 0.0 : s);
10191}
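/* Worked example (hypothetical tuning values): with gradient_S = 3.7e-06 and
 * offset_S = 3.7e-07, ISO 400 maps to S = 3.7e-06 * 400 + 3.7e-07 ~ 1.48e-03.
 * Together with the O entry below, this forms the DNG noise model
 * N(x) = sqrt(S * x + O) reported in ANDROID_SENSOR_NOISE_PROFILE. */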
10192
10193/*===========================================================================
10194 * FUNCTION : computeNoiseModelEntryO
10195 *
10196 * DESCRIPTION: function to map a given sensitivity to the O noise
10197 * model parameters in the DNG noise model.
10198 *
10199 * PARAMETERS : sens : the sensor sensitivity
10200 *
10201 * RETURN : O (sensor readout) noise
10202 *
10203 *==========================================================================*/
10204double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10205 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10206 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10207 1.0 : (1.0 * sens / max_analog_sens);
10208 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10209 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10210 return ((o < 0.0) ? 0.0 : o);
10211}
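/* Worked example (hypothetical values): with max_analog_sensitivity = 800,
 * ISO 1600 implies digital_gain = 2.0, so
 *   O = gradient_O * 1600 * 1600 + offset_O * 2.0 * 2.0
 * i.e. the readout term grows with the square of both the sensitivity and the
 * applied digital gain. */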
10212
10213/*===========================================================================
10214 * FUNCTION : getSensorSensitivity
10215 *
10216 * DESCRIPTION: convert iso_mode to an integer value
10217 *
10218 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10219 *
10220 * RETURN : sensitivity supported by sensor
10221 *
10222 *==========================================================================*/
10223int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10224{
10225 int32_t sensitivity;
10226
10227 switch (iso_mode) {
10228 case CAM_ISO_MODE_100:
10229 sensitivity = 100;
10230 break;
10231 case CAM_ISO_MODE_200:
10232 sensitivity = 200;
10233 break;
10234 case CAM_ISO_MODE_400:
10235 sensitivity = 400;
10236 break;
10237 case CAM_ISO_MODE_800:
10238 sensitivity = 800;
10239 break;
10240 case CAM_ISO_MODE_1600:
10241 sensitivity = 1600;
10242 break;
10243 default:
10244 sensitivity = -1;
10245 break;
10246 }
10247 return sensitivity;
10248}
10249
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010250int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10251 if (gHdrPlusClient != nullptr) {
10252 return OK;
10253 }
10254
10255 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10256 if (gHdrPlusClient->isEaselPresentOnDevice()) {
10257 // If Easel is present, power on Easel and suspend it immediately.
10258 status_t res = gHdrPlusClient->powerOnEasel();
10259 if (res != OK) {
10260 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10261 gHdrPlusClient = nullptr;
10262 return res;
10263 }
10264
10265 res = gHdrPlusClient->suspendEasel();
10266 if (res != OK) {
10267 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10268 }
10269
10270 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10271 } else {
10272 // Destroy HDR+ client if Easel isn't present.
10273 gHdrPlusClient = nullptr;
10274 }
10275
10276 return OK;
10277}
10278
Thierry Strudel3d639192016-09-09 11:52:26 -070010279/*===========================================================================
10280 * FUNCTION : getCamInfo
10281 *
10282 * DESCRIPTION: query camera capabilities
10283 *
10284 * PARAMETERS :
10285 * @cameraId : camera Id
10286 * @info : camera info struct to be filled in with camera capabilities
10287 *
10288 * RETURN : int type of status
10289 * NO_ERROR -- success
10290 * none-zero failure code
10291 *==========================================================================*/
10292int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10293 struct camera_info *info)
10294{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010295 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010296 int rc = 0;
10297
10298 pthread_mutex_lock(&gCamLock);
10299 if (NULL == gCamCapability[cameraId]) {
10300 rc = initCapabilities(cameraId);
10301 if (rc < 0) {
10302 pthread_mutex_unlock(&gCamLock);
10303 return rc;
10304 }
10305 }
10306
10307 if (NULL == gStaticMetadata[cameraId]) {
10308 rc = initStaticMetadata(cameraId);
10309 if (rc < 0) {
10310 pthread_mutex_unlock(&gCamLock);
10311 return rc;
10312 }
10313 }
10314
10315 switch(gCamCapability[cameraId]->position) {
10316 case CAM_POSITION_BACK:
10317 case CAM_POSITION_BACK_AUX:
10318 info->facing = CAMERA_FACING_BACK;
10319 break;
10320
10321 case CAM_POSITION_FRONT:
10322 case CAM_POSITION_FRONT_AUX:
10323 info->facing = CAMERA_FACING_FRONT;
10324 break;
10325
10326 default:
10327 LOGE("Unknown position type %d for camera id:%d",
10328 gCamCapability[cameraId]->position, cameraId);
10329 rc = -1;
10330 break;
10331 }
10332
10333
10334 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010335#ifndef USE_HAL_3_3
10336 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10337#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010338 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010339#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010340 info->static_camera_characteristics = gStaticMetadata[cameraId];
10341
10342 //For now assume both cameras can operate independently.
10343 info->conflicting_devices = NULL;
10344 info->conflicting_devices_length = 0;
10345
10346 //resource cost is 100 * MIN(1.0, m/M),
10347 //where m is throughput requirement with maximum stream configuration
10348 //and M is CPP maximum throughput.
10349 float max_fps = 0.0;
10350 for (uint32_t i = 0;
10351 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10352 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10353 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10354 }
10355 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10356 gCamCapability[cameraId]->active_array_size.width *
10357 gCamCapability[cameraId]->active_array_size.height * max_fps /
10358 gCamCapability[cameraId]->max_pixel_bandwidth;
10359 info->resource_cost = 100 * MIN(1.0, ratio);
10360 LOGI("camera %d resource cost is %d", cameraId,
10361 info->resource_cost);
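    /* Worked example (hypothetical numbers): a 4000x3000 active array at
     * max_fps = 30 with three processed streams (MAX_PROCESSED_STREAMS) needs
     *   m = 3 * 4000 * 3000 * 30 = 1.08e9 pixels/s;
     * if max_pixel_bandwidth were 1.2e9, ratio = 0.9 and resource_cost = 90. */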
10362
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010363 rc = initHdrPlusClientLocked();
10364 if (rc != OK) {
10365 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10366 pthread_mutex_unlock(&gCamLock);
10367 return rc;
10368 }
10369
Thierry Strudel3d639192016-09-09 11:52:26 -070010370 pthread_mutex_unlock(&gCamLock);
10371 return rc;
10372}
10373
10374/*===========================================================================
10375 * FUNCTION : translateCapabilityToMetadata
10376 *
10377 * DESCRIPTION: translate the capability into camera_metadata_t
10378 *
10379 * PARAMETERS : type of the request
10380 *
10381 *
10382 * RETURN : success: camera_metadata_t*
10383 * failure: NULL
10384 *
10385 *==========================================================================*/
10386camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10387{
10388 if (mDefaultMetadata[type] != NULL) {
10389 return mDefaultMetadata[type];
10390 }
10391 //first time we are handling this request
10392 //fill up the metadata structure using the wrapper class
10393 CameraMetadata settings;
10394 //translate from cam_capability_t to camera_metadata_tag_t
10395 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10396 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10397 int32_t defaultRequestID = 0;
10398 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10399
10400 /* OIS disable */
10401 char ois_prop[PROPERTY_VALUE_MAX];
10402 memset(ois_prop, 0, sizeof(ois_prop));
10403 property_get("persist.camera.ois.disable", ois_prop, "0");
10404 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10405
10406 /* Force video to use OIS */
10407 char videoOisProp[PROPERTY_VALUE_MAX];
10408 memset(videoOisProp, 0, sizeof(videoOisProp));
10409 property_get("persist.camera.ois.video", videoOisProp, "1");
10410 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010411
10412 // Hybrid AE enable/disable
10413 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10414 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10415 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10416 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10417
Thierry Strudel3d639192016-09-09 11:52:26 -070010418 uint8_t controlIntent = 0;
10419 uint8_t focusMode;
10420 uint8_t vsMode;
10421 uint8_t optStabMode;
10422 uint8_t cacMode;
10423 uint8_t edge_mode;
10424 uint8_t noise_red_mode;
10425 uint8_t tonemap_mode;
10426 bool highQualityModeEntryAvailable = FALSE;
10427 bool fastModeEntryAvailable = FALSE;
10428 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10429 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010430 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010431
Thierry Strudel3d639192016-09-09 11:52:26 -070010432 switch (type) {
10433 case CAMERA3_TEMPLATE_PREVIEW:
10434 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10435 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10436 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10437 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10438 edge_mode = ANDROID_EDGE_MODE_FAST;
10439 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10440 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10441 break;
10442 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10443 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10444 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10445 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10446 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10447 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10448 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10449 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10450 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10451 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10452 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10453 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10454 highQualityModeEntryAvailable = TRUE;
10455 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10456 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10457 fastModeEntryAvailable = TRUE;
10458 }
10459 }
10460 if (highQualityModeEntryAvailable) {
10461 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10462 } else if (fastModeEntryAvailable) {
10463 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10464 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010465 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10466 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10467 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010468 break;
10469 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10470 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10471 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10472 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010473 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10474 edge_mode = ANDROID_EDGE_MODE_FAST;
10475 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10476 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10477 if (forceVideoOis)
10478 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10479 break;
10480 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10481 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10482 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10483 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010484 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10485 edge_mode = ANDROID_EDGE_MODE_FAST;
10486 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10487 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10488 if (forceVideoOis)
10489 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10490 break;
10491 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10492 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10493 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10494 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10495 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10496 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10497 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10498 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10499 break;
10500 case CAMERA3_TEMPLATE_MANUAL:
10501 edge_mode = ANDROID_EDGE_MODE_FAST;
10502 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10503 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10504 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10505 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10506 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10507 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10508 break;
10509 default:
10510 edge_mode = ANDROID_EDGE_MODE_FAST;
10511 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10512 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10513 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10514 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10515 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10516 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10517 break;
10518 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010519 // Set CAC to OFF if the underlying device doesn't support it
10520 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10521 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10522 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010523 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10524 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10525 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10526 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10527 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10528 }
10529 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
10530
10531 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10532 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10533 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10534 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10535 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10536 || ois_disable)
10537 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10538 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010539 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010540
10541 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10542 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10543
10544 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10545 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10546
10547 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10548 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10549
10550 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10551 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10552
10553 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10554 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10555
10556 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10557 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10558
10559 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10560 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10561
10562 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10563 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10564
10565 /*flash*/
10566 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10567 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10568
10569 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10570 settings.update(ANDROID_FLASH_FIRING_POWER,
10571 &flashFiringLevel, 1);
10572
10573 /* lens */
10574 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10575 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10576
10577 if (gCamCapability[mCameraId]->filter_densities_count) {
10578 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10579 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10580 gCamCapability[mCameraId]->filter_densities_count);
10581 }
10582
10583 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10584 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10585
Thierry Strudel3d639192016-09-09 11:52:26 -070010586 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10587 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10588
10589 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10590 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10591
10592 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10593 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10594
10595 /* face detection (default to OFF) */
10596 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10597 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10598
Thierry Strudel54dc9782017-02-15 12:12:10 -080010599 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10600 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010601
10602 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10603 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10604
10605 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10606 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10607
Thierry Strudel3d639192016-09-09 11:52:26 -070010608
10609 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10610 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10611
10612 /* Exposure time (default to the minimum supported exposure time) */
10613 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10614 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10615
10616 /* frame duration */
10617 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10618 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10619
10620 /* sensitivity */
10621 static const int32_t default_sensitivity = 100;
10622 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010623#ifndef USE_HAL_3_3
10624 static const int32_t default_isp_sensitivity =
10625 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10626 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10627#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010628
10629 /*edge mode*/
10630 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10631
10632 /*noise reduction mode*/
10633 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10634
10635 /*color correction mode*/
10636 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10637 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10638
10639 /*tonemap mode*/
10640 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10641
10642 int32_t scaler_crop_region[4];
10643 scaler_crop_region[0] = 0;
10644 scaler_crop_region[1] = 0;
10645 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10646 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10647 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10648
10649 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10650 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10651
10652 /*focus distance*/
10653 float focus_distance = 0.0;
10654 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10655
10656 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010657 /* Restrict template max fps to TEMPLATE_MAX_PREVIEW_FPS */
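/* For preview, still-capture and ZSL templates pick the widest supported range;
 * for the remaining templates pick the highest fixed (min == max) frame rate. */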
Thierry Strudel3d639192016-09-09 11:52:26 -070010658 float max_range = 0.0;
10659 float max_fixed_fps = 0.0;
10660 int32_t fps_range[2] = {0, 0};
10661 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10662 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010663 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10664 TEMPLATE_MAX_PREVIEW_FPS) {
10665 continue;
10666 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010667 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10668 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10669 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10670 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10671 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10672 if (range > max_range) {
10673 fps_range[0] =
10674 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10675 fps_range[1] =
10676 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10677 max_range = range;
10678 }
10679 } else {
10680 if (range < 0.01 && max_fixed_fps <
10681 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10682 fps_range[0] =
10683 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10684 fps_range[1] =
10685 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10686 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10687 }
10688 }
10689 }
10690 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10691
10692 /*precapture trigger*/
10693 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10694 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10695
10696 /*af trigger*/
10697 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10698 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10699
10700 /* ae & af regions */
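/* Default metering region: the full active array as [xmin, ymin, xmax, ymax, weight],
 * with weight 0 so the region is effectively unset. */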
10701 int32_t active_region[] = {
10702 gCamCapability[mCameraId]->active_array_size.left,
10703 gCamCapability[mCameraId]->active_array_size.top,
10704 gCamCapability[mCameraId]->active_array_size.left +
10705 gCamCapability[mCameraId]->active_array_size.width,
10706 gCamCapability[mCameraId]->active_array_size.top +
10707 gCamCapability[mCameraId]->active_array_size.height,
10708 0};
10709 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10710 sizeof(active_region) / sizeof(active_region[0]));
10711 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10712 sizeof(active_region) / sizeof(active_region[0]));
10713
10714 /* black level lock */
10715 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10716 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10717
Thierry Strudel3d639192016-09-09 11:52:26 -070010718 //special defaults for manual template
10719 if (type == CAMERA3_TEMPLATE_MANUAL) {
10720 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10721 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10722
10723 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10724 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10725
10726 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10727 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10728
10729 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10730 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10731
10732 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10733 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10734
10735 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10736 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10737 }
10738
10739
10740 /* TNR
10741 * This is where we decide for which template types TNR is enabled.
10742 * TNR is enabled if either the preview or the video stream requires it.
10743 * This is not to be confused with per-stream linking; that decision is
10744 * still made per session and is handled as part of stream configuration.
10745 */
10746 uint8_t tnr_enable = 0;
10747
10748 if (m_bTnrPreview || m_bTnrVideo) {
10749
10750 switch (type) {
10751 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10752 tnr_enable = 1;
10753 break;
10754
10755 default:
10756 tnr_enable = 0;
10757 break;
10758 }
10759
10760 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10761 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10762 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10763
10764 LOGD("TNR:%d with process plate %d for template:%d",
10765 tnr_enable, tnr_process_type, type);
10766 }
10767
10768 //Update Link tags to default
10769 int32_t sync_type = CAM_TYPE_STANDALONE;
10770 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10771
10772 int32_t is_main = 0; //value is irrelevant here; the app is expected to overwrite it
10773 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10774
10775 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10776
10777 /* CDS default */
10778 char prop[PROPERTY_VALUE_MAX];
10779 memset(prop, 0, sizeof(prop));
10780 property_get("persist.camera.CDS", prop, "Auto");
10781 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10782 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
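/* Fall back to AUTO if the property value did not map to a valid CDS mode */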
10783 if (CAM_CDS_MODE_MAX == cds_mode) {
10784 cds_mode = CAM_CDS_MODE_AUTO;
10785 }
10786
10787 /* Disable CDS in templates that have TNR enabled */
10788 if (tnr_enable)
10789 cds_mode = CAM_CDS_MODE_OFF;
10790
10791 int32_t mode = cds_mode;
10792 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010793
Thierry Strudel269c81a2016-10-12 12:13:59 -070010794 /* Manual AEC convergence speed is disabled by default */
10795 float default_aec_speed = 0;
10796 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10797
10798 /* Manual AWB convergence speed is disabled by default */
10799 float default_awb_speed = 0;
10800 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10801
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010802 // Set instant AEC to normal convergence by default
10803 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10804 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10805
Shuzhen Wang19463d72016-03-08 11:09:52 -080010806 /* hybrid ae */
10807 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10808
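/* Cache the constructed settings for this template type before returning them */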
Thierry Strudel3d639192016-09-09 11:52:26 -070010809 mDefaultMetadata[type] = settings.release();
10810
10811 return mDefaultMetadata[type];
10812}
10813
10814/*===========================================================================
10815 * FUNCTION : setFrameParameters
10816 *
10817 * DESCRIPTION: set parameters per frame as requested in the metadata from
10818 * framework
10819 *
10820 * PARAMETERS :
10821 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010822 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010823 * @blob_request: Whether this request is a blob request or not
10824 *
10825 * RETURN : success: NO_ERROR
10826 * failure:
10827 *==========================================================================*/
10828int QCamera3HardwareInterface::setFrameParameters(
10829 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010830 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010831 int blob_request,
10832 uint32_t snapshotStreamId)
10833{
10834 /*translate from camera_metadata_t type to parm_type_t*/
10835 int rc = 0;
10836 int32_t hal_version = CAM_HAL_V3;
10837
10838 clear_metadata_buffer(mParameters);
10839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10840 LOGE("Failed to set hal version in the parameters");
10841 return BAD_VALUE;
10842 }
10843
10844 /*we need to update the frame number in the parameters*/
10845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10846 request->frame_number)) {
10847 LOGE("Failed to set the frame number in the parameters");
10848 return BAD_VALUE;
10849 }
10850
10851 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010853 LOGE("Failed to set stream type mask in the parameters");
10854 return BAD_VALUE;
10855 }
10856
10857 if (mUpdateDebugLevel) {
10858 uint32_t dummyDebugLevel = 0;
10859 /* The value of dummyDebugLevel is irrelevant; receiving
10860 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property */
10861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10862 dummyDebugLevel)) {
10863 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10864 return BAD_VALUE;
10865 }
10866 mUpdateDebugLevel = false;
10867 }
10868
10869 if(request->settings != NULL){
10870 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10871 if (blob_request)
10872 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10873 }
10874
10875 return rc;
10876}
10877
10878/*===========================================================================
10879 * FUNCTION : setReprocParameters
10880 *
10881 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10882 * return it.
10883 *
10884 * PARAMETERS :
10885 * @request : request that needs to be serviced
10886 *
10887 * RETURN : success: NO_ERROR
10888 * failure:
10889 *==========================================================================*/
10890int32_t QCamera3HardwareInterface::setReprocParameters(
10891 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10892 uint32_t snapshotStreamId)
10893{
10894 /*translate from camera_metadata_t type to parm_type_t*/
10895 int rc = 0;
10896
10897 if (NULL == request->settings){
10898 LOGE("Reprocess settings cannot be NULL");
10899 return BAD_VALUE;
10900 }
10901
10902 if (NULL == reprocParam) {
10903 LOGE("Invalid reprocessing metadata buffer");
10904 return BAD_VALUE;
10905 }
10906 clear_metadata_buffer(reprocParam);
10907
10908 /*we need to update the frame number in the parameters*/
10909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10910 request->frame_number)) {
10911 LOGE("Failed to set the frame number in the parameters");
10912 return BAD_VALUE;
10913 }
10914
10915 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10916 if (rc < 0) {
10917 LOGE("Failed to translate reproc request");
10918 return rc;
10919 }
10920
10921 CameraMetadata frame_settings;
10922 frame_settings = request->settings;
10923 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10924 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10925 int32_t *crop_count =
10926 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10927 int32_t *crop_data =
10928 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10929 int32_t *roi_map =
10930 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
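/* crop_data and roi_map are laid out as [left, top, width, height];
 * only the first entry is consumed for the reprocess stream. */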
10931 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10932 cam_crop_data_t crop_meta;
10933 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10934 crop_meta.num_of_streams = 1;
10935 crop_meta.crop_info[0].crop.left = crop_data[0];
10936 crop_meta.crop_info[0].crop.top = crop_data[1];
10937 crop_meta.crop_info[0].crop.width = crop_data[2];
10938 crop_meta.crop_info[0].crop.height = crop_data[3];
10939
10940 crop_meta.crop_info[0].roi_map.left =
10941 roi_map[0];
10942 crop_meta.crop_info[0].roi_map.top =
10943 roi_map[1];
10944 crop_meta.crop_info[0].roi_map.width =
10945 roi_map[2];
10946 crop_meta.crop_info[0].roi_map.height =
10947 roi_map[3];
10948
10949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10950 rc = BAD_VALUE;
10951 }
10952 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10953 request->input_buffer->stream,
10954 crop_meta.crop_info[0].crop.left,
10955 crop_meta.crop_info[0].crop.top,
10956 crop_meta.crop_info[0].crop.width,
10957 crop_meta.crop_info[0].crop.height);
10958 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10959 request->input_buffer->stream,
10960 crop_meta.crop_info[0].roi_map.left,
10961 crop_meta.crop_info[0].roi_map.top,
10962 crop_meta.crop_info[0].roi_map.width,
10963 crop_meta.crop_info[0].roi_map.height);
10964 } else {
10965 LOGE("Invalid reprocess crop count %d!", *crop_count);
10966 }
10967 } else {
10968 LOGE("No crop data from matching output stream");
10969 }
10970
10971 /* These settings are not needed for regular requests so handle them specially for
10972 reprocess requests; information needed for EXIF tags */
10973 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10974 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10975 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10976 if (NAME_NOT_FOUND != val) {
10977 uint32_t flashMode = (uint32_t)val;
10978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10979 rc = BAD_VALUE;
10980 }
10981 } else {
10982 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10983 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10984 }
10985 } else {
10986 LOGH("No flash mode in reprocess settings");
10987 }
10988
10989 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10990 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10992 rc = BAD_VALUE;
10993 }
10994 } else {
10995 LOGH("No flash state in reprocess settings");
10996 }
10997
10998 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10999 uint8_t *reprocessFlags =
11000 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11002 *reprocessFlags)) {
11003 rc = BAD_VALUE;
11004 }
11005 }
11006
Thierry Strudel54dc9782017-02-15 12:12:10 -080011007 // Add exif debug data to internal metadata
11008 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11009 mm_jpeg_debug_exif_params_t *debug_params =
11010 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11011 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11012 // AE
11013 if (debug_params->ae_debug_params_valid == TRUE) {
11014 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11015 debug_params->ae_debug_params);
11016 }
11017 // AWB
11018 if (debug_params->awb_debug_params_valid == TRUE) {
11019 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11020 debug_params->awb_debug_params);
11021 }
11022 // AF
11023 if (debug_params->af_debug_params_valid == TRUE) {
11024 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11025 debug_params->af_debug_params);
11026 }
11027 // ASD
11028 if (debug_params->asd_debug_params_valid == TRUE) {
11029 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11030 debug_params->asd_debug_params);
11031 }
11032 // Stats
11033 if (debug_params->stats_debug_params_valid == TRUE) {
11034 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11035 debug_params->stats_debug_params);
11036 }
11037 // BE Stats
11038 if (debug_params->bestats_debug_params_valid == TRUE) {
11039 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11040 debug_params->bestats_debug_params);
11041 }
11042 // BHIST
11043 if (debug_params->bhist_debug_params_valid == TRUE) {
11044 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11045 debug_params->bhist_debug_params);
11046 }
11047 // 3A Tuning
11048 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11049 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11050 debug_params->q3a_tuning_debug_params);
11051 }
11052 }
11053
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011054 // Add metadata which reprocess needs
11055 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11056 cam_reprocess_info_t *repro_info =
11057 (cam_reprocess_info_t *)frame_settings.find
11058 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011059 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011060 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011061 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011062 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011063 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011064 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011065 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011066 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011067 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011068 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011069 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011070 repro_info->pipeline_flip);
11071 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11072 repro_info->af_roi);
11073 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11074 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11076 CAM_INTF_PARM_ROTATION metadata has already been added in
11077 translateToHalMetadata and the HAL needs to keep this new rotation
11078 metadata. Otherwise, the old rotation info saved in the vendor tag
11079 is used */
11080 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11081 CAM_INTF_PARM_ROTATION, reprocParam) {
11082 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11083 } else {
11084 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011085 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011086 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011087 }
11088
11089 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11090 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11091 roi.width and roi.height give the final JPEG size.
11092 For now, the HAL only checks this for reprocess requests */
11093 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11094 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11095 uint8_t *enable =
11096 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11097 if (*enable == TRUE) {
11098 int32_t *crop_data =
11099 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11100 cam_stream_crop_info_t crop_meta;
11101 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11102 crop_meta.stream_id = 0;
11103 crop_meta.crop.left = crop_data[0];
11104 crop_meta.crop.top = crop_data[1];
11105 crop_meta.crop.width = crop_data[2];
11106 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011107 // The JPEG crop roi should match cpp output size
11108 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11109 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11110 crop_meta.roi_map.left = 0;
11111 crop_meta.roi_map.top = 0;
11112 crop_meta.roi_map.width = cpp_crop->crop.width;
11113 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011114 }
11115 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11116 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011117 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011118 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011119 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11120 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011121 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011122 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11123
11124 // Add JPEG scale information
11125 cam_dimension_t scale_dim;
11126 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11127 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11128 int32_t *roi =
11129 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11130 scale_dim.width = roi[2];
11131 scale_dim.height = roi[3];
11132 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11133 scale_dim);
11134 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11135 scale_dim.width, scale_dim.height, mCameraId);
11136 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011137 }
11138 }
11139
11140 return rc;
11141}
11142
11143/*===========================================================================
11144 * FUNCTION : saveRequestSettings
11145 *
11146 * DESCRIPTION: Add any settings that might have changed to the request settings
11147 * and save the settings to be applied on the frame
11148 *
11149 * PARAMETERS :
11150 * @jpegMetadata : the extracted and/or modified jpeg metadata
11151 * @request : request with initial settings
11152 *
11153 * RETURN :
11154 * camera_metadata_t* : pointer to the saved request settings
11155 *==========================================================================*/
11156camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11157 const CameraMetadata &jpegMetadata,
11158 camera3_capture_request_t *request)
11159{
11160 camera_metadata_t *resultMetadata;
11161 CameraMetadata camMetadata;
11162 camMetadata = request->settings;
11163
11164 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11165 int32_t thumbnail_size[2];
11166 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11167 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11168 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11169 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11170 }
11171
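/* Mark requests carrying an input buffer as reprocess requests via the private vendor tag */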
11172 if (request->input_buffer != NULL) {
11173 uint8_t reprocessFlags = 1;
11174 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11175 (uint8_t*)&reprocessFlags,
11176 sizeof(reprocessFlags));
11177 }
11178
11179 resultMetadata = camMetadata.release();
11180 return resultMetadata;
11181}
11182
11183/*===========================================================================
11184 * FUNCTION : setHalFpsRange
11185 *
11186 * DESCRIPTION: set FPS range parameter
11187 *
11188 *
11189 * PARAMETERS :
11190 * @settings : Metadata from framework
11191 * @hal_metadata: Metadata buffer
11192 *
11193 *
11194 * RETURN : success: NO_ERROR
11195 * failure:
11196 *==========================================================================*/
11197int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11198 metadata_buffer_t *hal_metadata)
11199{
11200 int32_t rc = NO_ERROR;
11201 cam_fps_range_t fps_range;
11202 fps_range.min_fps = (float)
11203 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11204 fps_range.max_fps = (float)
11205 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11206 fps_range.video_min_fps = fps_range.min_fps;
11207 fps_range.video_max_fps = fps_range.max_fps;
11208
11209 LOGD("aeTargetFpsRange fps: [%f %f]",
11210 fps_range.min_fps, fps_range.max_fps);
11211 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11212 * follows:
11213 * ---------------------------------------------------------------|
11214 * Video stream is absent in configure_streams |
11215 * (Camcorder preview before the first video record |
11216 * ---------------------------------------------------------------|
11217 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11218 * | | | vid_min/max_fps|
11219 * ---------------------------------------------------------------|
11220 * NO | [ 30, 240] | 240 | [240, 240] |
11221 * |-------------|-------------|----------------|
11222 * | [240, 240] | 240 | [240, 240] |
11223 * ---------------------------------------------------------------|
11224 * Video stream is present in configure_streams |
11225 * ---------------------------------------------------------------|
11226 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11227 * | | | vid_min/max_fps|
11228 * ---------------------------------------------------------------|
11229 * NO | [ 30, 240] | 240 | [240, 240] |
11230 * (camcorder prev |-------------|-------------|----------------|
11231 * after video rec | [240, 240] | 240 | [240, 240] |
11232 * is stopped) | | | |
11233 * ---------------------------------------------------------------|
11234 * YES | [ 30, 240] | 240 | [240, 240] |
11235 * |-------------|-------------|----------------|
11236 * | [240, 240] | 240 | [240, 240] |
11237 * ---------------------------------------------------------------|
11238 * When Video stream is absent in configure_streams,
11239 * preview fps = sensor_fps / batchsize
11240 * Eg: for 240fps at batchSize 4, preview = 60fps
11241 * for 120fps at batchSize 4, preview = 30fps
11242 *
11243 * When video stream is present in configure_streams, preview fps is as per
11244 * the ratio of preview buffers to video buffers requested in process
11245 * capture request
11246 */
11247 mBatchSize = 0;
11248 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11249 fps_range.min_fps = fps_range.video_max_fps;
11250 fps_range.video_min_fps = fps_range.video_max_fps;
11251 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11252 fps_range.max_fps);
11253 if (NAME_NOT_FOUND != val) {
11254 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11256 return BAD_VALUE;
11257 }
11258
11259 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11260 /* If batchmode is currently in progress and the fps changes,
11261 * set the flag to restart the sensor */
11262 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11263 (mHFRVideoFps != fps_range.max_fps)) {
11264 mNeedSensorRestart = true;
11265 }
11266 mHFRVideoFps = fps_range.max_fps;
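/* Derive the batch size from the HFR rate relative to the preview rate,
 * capped at MAX_HFR_BATCH_SIZE. */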
11267 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11268 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11269 mBatchSize = MAX_HFR_BATCH_SIZE;
11270 }
11271 }
11272 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11273
11274 }
11275 } else {
11276 /* HFR mode is session param in backend/ISP. This should be reset when
11277 * in non-HFR mode */
11278 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11280 return BAD_VALUE;
11281 }
11282 }
11283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11284 return BAD_VALUE;
11285 }
11286 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11287 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11288 return rc;
11289}
11290
11291/*===========================================================================
11292 * FUNCTION : translateToHalMetadata
11293 *
11294 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11295 *
11296 *
11297 * PARAMETERS :
11298 * @request : request sent from framework
11299 *
11300 *
11301 * RETURN : success: NO_ERROR
11302 * failure:
11303 *==========================================================================*/
11304int QCamera3HardwareInterface::translateToHalMetadata
11305 (const camera3_capture_request_t *request,
11306 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011307 uint32_t snapshotStreamId) {
11308 if (request == nullptr || hal_metadata == nullptr) {
11309 return BAD_VALUE;
11310 }
11311
11312 int64_t minFrameDuration = getMinFrameDuration(request);
11313
11314 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11315 minFrameDuration);
11316}
11317
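/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: read from the framework camera_metadata_t and change to parm_type_t
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings sent from framework
 *   @hal_metadata      : HAL metadata buffer to be filled
 *   @snapshotStreamId  : snapshot stream ID
 *   @minFrameDuration  : minimum frame duration used to clamp the sensor frame duration
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *==========================================================================*/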
11318int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11319 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11320 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11321
Thierry Strudel3d639192016-09-09 11:52:26 -070011322 int rc = 0;
11323 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011324 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011325
11326 /* Do not change the order of the following list unless you know what you are
11327 * doing.
11328 * The order is laid out in such a way that parameters in the front of the table
11329 * may be used to override the parameters later in the table. Examples are:
11330 * 1. META_MODE should precede AEC/AWB/AF MODE
11331 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11332 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11333 * 4. Any mode should precede its corresponding settings
11334 */
11335 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11336 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11338 rc = BAD_VALUE;
11339 }
11340 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11341 if (rc != NO_ERROR) {
11342 LOGE("extractSceneMode failed");
11343 }
11344 }
11345
11346 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11347 uint8_t fwk_aeMode =
11348 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11349 uint8_t aeMode;
11350 int32_t redeye;
11351
11352 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11353 aeMode = CAM_AE_MODE_OFF;
11354 } else {
11355 aeMode = CAM_AE_MODE_ON;
11356 }
11357 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11358 redeye = 1;
11359 } else {
11360 redeye = 0;
11361 }
11362
11363 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11364 fwk_aeMode);
11365 if (NAME_NOT_FOUND != val) {
11366 int32_t flashMode = (int32_t)val;
11367 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11368 }
11369
11370 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11372 rc = BAD_VALUE;
11373 }
11374 }
11375
11376 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11377 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11378 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11379 fwk_whiteLevel);
11380 if (NAME_NOT_FOUND != val) {
11381 uint8_t whiteLevel = (uint8_t)val;
11382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11383 rc = BAD_VALUE;
11384 }
11385 }
11386 }
11387
11388 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11389 uint8_t fwk_cacMode =
11390 frame_settings.find(
11391 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11392 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11393 fwk_cacMode);
11394 if (NAME_NOT_FOUND != val) {
11395 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11396 bool entryAvailable = FALSE;
11397 // Check whether Frameworks set CAC mode is supported in device or not
11398 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11399 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11400 entryAvailable = TRUE;
11401 break;
11402 }
11403 }
11404 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11405 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11406 // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH doing the same as FAST via the ISP
11407 // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST doing the same as OFF
11408 if (entryAvailable == FALSE) {
11409 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11410 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11411 } else {
11412 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11413 // HIGH is not supported, so set FAST; the spec says the underlying
11414 // device implementation can be the same for both modes.
11415 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11416 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11417 // FAST is not supported, so we cannot set HIGH or FAST; choose OFF
11418 // to avoid the fps drop that high quality would cause
11419 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11420 } else {
11421 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11422 }
11423 }
11424 }
11425 LOGD("Final cacMode is %d", cacMode);
11426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11427 rc = BAD_VALUE;
11428 }
11429 } else {
11430 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11431 }
11432 }
11433
Thierry Strudel2896d122017-02-23 19:18:03 -080011434 char af_value[PROPERTY_VALUE_MAX];
11435 property_get("persist.camera.af.infinity", af_value, "0");
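/* A non-zero persist.camera.af.infinity value forces the focus mode to infinity
 * and the framework AF mode is ignored. */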
11436
Jason Lee84ae9972017-02-24 13:24:24 -080011437 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011438 if (atoi(af_value) == 0) {
11439 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011440 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011441 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11442 fwk_focusMode);
11443 if (NAME_NOT_FOUND != val) {
11444 uint8_t focusMode = (uint8_t)val;
11445 LOGD("set focus mode %d", focusMode);
11446 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11447 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11448 rc = BAD_VALUE;
11449 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011450 }
11451 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011452 } else {
11453 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11454 LOGE("Focus forced to infinity %d", focusMode);
11455 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11456 rc = BAD_VALUE;
11457 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011458 }
11459
Jason Lee84ae9972017-02-24 13:24:24 -080011460 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11461 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011462 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11464 focalDistance)) {
11465 rc = BAD_VALUE;
11466 }
11467 }
11468
11469 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11470 uint8_t fwk_antibandingMode =
11471 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11472 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11473 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11474 if (NAME_NOT_FOUND != val) {
11475 uint32_t hal_antibandingMode = (uint32_t)val;
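/* Resolve AUTO antibanding to the mains frequency of the current region (m60HzZone) */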
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011476 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11477 if (m60HzZone) {
11478 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11479 } else {
11480 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11481 }
11482 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11484 hal_antibandingMode)) {
11485 rc = BAD_VALUE;
11486 }
11487 }
11488 }
11489
11490 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11491 int32_t expCompensation = frame_settings.find(
11492 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11493 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11494 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11495 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11496 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011497 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11499 expCompensation)) {
11500 rc = BAD_VALUE;
11501 }
11502 }
11503
11504 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11505 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11507 rc = BAD_VALUE;
11508 }
11509 }
11510 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11511 rc = setHalFpsRange(frame_settings, hal_metadata);
11512 if (rc != NO_ERROR) {
11513 LOGE("setHalFpsRange failed");
11514 }
11515 }
11516
11517 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11518 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11520 rc = BAD_VALUE;
11521 }
11522 }
11523
11524 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11525 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11526 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11527 fwk_effectMode);
11528 if (NAME_NOT_FOUND != val) {
11529 uint8_t effectMode = (uint8_t)val;
11530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11531 rc = BAD_VALUE;
11532 }
11533 }
11534 }
11535
11536 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11537 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11539 colorCorrectMode)) {
11540 rc = BAD_VALUE;
11541 }
11542 }
11543
11544 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11545 cam_color_correct_gains_t colorCorrectGains;
11546 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11547 colorCorrectGains.gains[i] =
11548 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11549 }
11550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11551 colorCorrectGains)) {
11552 rc = BAD_VALUE;
11553 }
11554 }
11555
11556 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11557 cam_color_correct_matrix_t colorCorrectTransform;
11558 cam_rational_type_t transform_elem;
11559 size_t num = 0;
11560 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11561 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11562 transform_elem.numerator =
11563 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11564 transform_elem.denominator =
11565 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11566 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11567 num++;
11568 }
11569 }
11570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11571 colorCorrectTransform)) {
11572 rc = BAD_VALUE;
11573 }
11574 }
11575
11576 cam_trigger_t aecTrigger;
11577 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11578 aecTrigger.trigger_id = -1;
11579 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11580 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11581 aecTrigger.trigger =
11582 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11583 aecTrigger.trigger_id =
11584 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11586 aecTrigger)) {
11587 rc = BAD_VALUE;
11588 }
11589 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11590 aecTrigger.trigger, aecTrigger.trigger_id);
11591 }
11592
11593 /*af_trigger must come with a trigger id*/
11594 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11595 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11596 cam_trigger_t af_trigger;
11597 af_trigger.trigger =
11598 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11599 af_trigger.trigger_id =
11600 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11602 rc = BAD_VALUE;
11603 }
11604 LOGD("AfTrigger: %d AfTriggerID: %d",
11605 af_trigger.trigger, af_trigger.trigger_id);
11606 }
11607
11608 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11609 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11610 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11611 rc = BAD_VALUE;
11612 }
11613 }
11614 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11615 cam_edge_application_t edge_application;
11616 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011617
Thierry Strudel3d639192016-09-09 11:52:26 -070011618 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11619 edge_application.sharpness = 0;
11620 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011621 edge_application.sharpness =
11622 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11623 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11624 int32_t sharpness =
11625 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11626 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11627 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11628 LOGD("Setting edge mode sharpness %d", sharpness);
11629 edge_application.sharpness = sharpness;
11630 }
11631 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011632 }
11633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11634 rc = BAD_VALUE;
11635 }
11636 }
11637
11638 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11639 int32_t respectFlashMode = 1;
11640 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11641 uint8_t fwk_aeMode =
11642 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11643 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11644 respectFlashMode = 0;
11645 LOGH("AE Mode controls flash, ignore android.flash.mode");
11646 }
11647 }
11648 if (respectFlashMode) {
11649 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11650 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11651 LOGH("flash mode after mapping %d", val);
11652 // To check: CAM_INTF_META_FLASH_MODE usage
11653 if (NAME_NOT_FOUND != val) {
11654 uint8_t flashMode = (uint8_t)val;
11655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11656 rc = BAD_VALUE;
11657 }
11658 }
11659 }
11660 }
11661
11662 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11663 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11665 rc = BAD_VALUE;
11666 }
11667 }
11668
11669 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11670 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11672 flashFiringTime)) {
11673 rc = BAD_VALUE;
11674 }
11675 }
11676
11677 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11678 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11680 hotPixelMode)) {
11681 rc = BAD_VALUE;
11682 }
11683 }
11684
11685 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11686 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11688 lensAperture)) {
11689 rc = BAD_VALUE;
11690 }
11691 }
11692
11693 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11694 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11696 filterDensity)) {
11697 rc = BAD_VALUE;
11698 }
11699 }
11700
11701 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11702 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11704 focalLength)) {
11705 rc = BAD_VALUE;
11706 }
11707 }
11708
11709 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11710 uint8_t optStabMode =
11711 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11713 optStabMode)) {
11714 rc = BAD_VALUE;
11715 }
11716 }
11717
11718 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11719 uint8_t videoStabMode =
11720 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11721 LOGD("videoStabMode from APP = %d", videoStabMode);
11722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
11723 videoStabMode)) {
11724 rc = BAD_VALUE;
11725 }
11726 }
11727
11728
11729 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11730 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11732 noiseRedMode)) {
11733 rc = BAD_VALUE;
11734 }
11735 }
11736
11737 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11738 float reprocessEffectiveExposureFactor =
11739 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11740 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11741 reprocessEffectiveExposureFactor)) {
11742 rc = BAD_VALUE;
11743 }
11744 }
11745
11746 cam_crop_region_t scalerCropRegion;
11747 bool scalerCropSet = false;
11748 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11749 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11750 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11751 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11752 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11753
11754 // Map coordinate system from active array to sensor output.
11755 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11756 scalerCropRegion.width, scalerCropRegion.height);
11757
11758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11759 scalerCropRegion)) {
11760 rc = BAD_VALUE;
11761 }
11762 scalerCropSet = true;
11763 }
11764
11765 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11766 int64_t sensorExpTime =
11767 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11768 LOGD("setting sensorExpTime %lld", sensorExpTime);
11769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11770 sensorExpTime)) {
11771 rc = BAD_VALUE;
11772 }
11773 }
11774
11775 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11776 int64_t sensorFrameDuration =
11777 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011778 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11779 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11780 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11781 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11783 sensorFrameDuration)) {
11784 rc = BAD_VALUE;
11785 }
11786 }
11787
11788 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11789 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11790 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11791 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11792 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11793 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11794 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11795 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11796 sensorSensitivity)) {
11797 rc = BAD_VALUE;
11798 }
11799 }
11800
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011801#ifndef USE_HAL_3_3
11802 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11803 int32_t ispSensitivity =
11804 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11805 if (ispSensitivity <
11806 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11807 ispSensitivity =
11808 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11809 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11810 }
11811 if (ispSensitivity >
11812 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11813 ispSensitivity =
11814 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11815 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11816 }
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11818 ispSensitivity)) {
11819 rc = BAD_VALUE;
11820 }
11821 }
11822#endif
11823
Thierry Strudel3d639192016-09-09 11:52:26 -070011824 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11825 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11827 rc = BAD_VALUE;
11828 }
11829 }
11830
11831 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11832 uint8_t fwk_facedetectMode =
11833 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11834
11835 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11836 fwk_facedetectMode);
11837
11838 if (NAME_NOT_FOUND != val) {
11839 uint8_t facedetectMode = (uint8_t)val;
11840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11841 facedetectMode)) {
11842 rc = BAD_VALUE;
11843 }
11844 }
11845 }
11846
Thierry Strudel54dc9782017-02-15 12:12:10 -080011847 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011848 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011849 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11851 histogramMode)) {
11852 rc = BAD_VALUE;
11853 }
11854 }
11855
11856 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11857 uint8_t sharpnessMapMode =
11858 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11860 sharpnessMapMode)) {
11861 rc = BAD_VALUE;
11862 }
11863 }
11864
11865 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11866 uint8_t tonemapMode =
11867 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11869 rc = BAD_VALUE;
11870 }
11871 }
11872 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11873 /*All tonemap channels will have the same number of points*/
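/* Curve data is stored as interleaved (Pin, Pout) pairs, hence count / 2 points per channel */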
11874 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11875 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11876 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11877 cam_rgb_tonemap_curves tonemapCurves;
11878 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11879 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11880 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11881 tonemapCurves.tonemap_points_cnt,
11882 CAM_MAX_TONEMAP_CURVE_SIZE);
11883 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11884 }
11885
11886 /* ch0 = G*/
11887 size_t point = 0;
11888 cam_tonemap_curve_t tonemapCurveGreen;
11889 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11890 for (size_t j = 0; j < 2; j++) {
11891 tonemapCurveGreen.tonemap_points[i][j] =
11892 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11893 point++;
11894 }
11895 }
11896 tonemapCurves.curves[0] = tonemapCurveGreen;
11897
11898 /* ch 1 = B */
11899 point = 0;
11900 cam_tonemap_curve_t tonemapCurveBlue;
11901 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11902 for (size_t j = 0; j < 2; j++) {
11903 tonemapCurveBlue.tonemap_points[i][j] =
11904 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11905 point++;
11906 }
11907 }
11908 tonemapCurves.curves[1] = tonemapCurveBlue;
11909
11910 /* ch 2 = R */
11911 point = 0;
11912 cam_tonemap_curve_t tonemapCurveRed;
11913 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11914 for (size_t j = 0; j < 2; j++) {
11915 tonemapCurveRed.tonemap_points[i][j] =
11916 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11917 point++;
11918 }
11919 }
11920 tonemapCurves.curves[2] = tonemapCurveRed;
11921
11922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11923 tonemapCurves)) {
11924 rc = BAD_VALUE;
11925 }
11926 }
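    /* Illustrative sketch (framework-side view, not executed by the HAL): the
     * three curves above arrive as interleaved (Pin, Pout) float pairs, where
     * "settings" is assumed to be a client-side android::CameraMetadata. A
     * linear 2-point curve per channel would be supplied as:
     *
     *   float linearCurve[] = {0.0f, 0.0f, 1.0f, 1.0f};   // (Pin, Pout) pairs
     *   settings.update(ANDROID_TONEMAP_CURVE_GREEN, linearCurve, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_BLUE,  linearCurve, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_RED,   linearCurve, 4);
     *
     * count/2 then yields 2 points per channel, and the loops above copy each
     * pair into tonemapCurves.curves[ch].tonemap_points[i][0..1].
     */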
11927
11928 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11929 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11931 captureIntent)) {
11932 rc = BAD_VALUE;
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11937 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11939 blackLevelLock)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11945 uint8_t lensShadingMapMode =
11946 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11948 lensShadingMapMode)) {
11949 rc = BAD_VALUE;
11950 }
11951 }
11952
11953 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11954 cam_area_t roi;
11955 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011956 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011957
11958 // Map coordinate system from active array to sensor output.
11959 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11960 roi.rect.height);
11961
11962 if (scalerCropSet) {
11963 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11964 }
11965 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11966 rc = BAD_VALUE;
11967 }
11968 }
11969
11970 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11971 cam_area_t roi;
11972 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011973 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011974
11975 // Map coordinate system from active array to sensor output.
11976 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11977 roi.rect.height);
11978
11979 if (scalerCropSet) {
11980 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11981 }
11982 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986
11987 // CDS for non-HFR non-video mode
11988 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11989 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11990 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11991 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11992 LOGE("Invalid CDS mode %d!", *fwk_cds);
11993 } else {
11994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11995 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11996 rc = BAD_VALUE;
11997 }
11998 }
11999 }
12000
Thierry Strudel04e026f2016-10-10 11:27:36 -070012001 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012002 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012003 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012004 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12005 }
12006 if (m_bVideoHdrEnabled)
12007 vhdr = CAM_VIDEO_HDR_MODE_ON;
12008
Thierry Strudel54dc9782017-02-15 12:12:10 -080012009 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12010
12011 if(vhdr != curr_hdr_state)
12012 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12013
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012014 rc = setVideoHdrMode(mParameters, vhdr);
12015 if (rc != NO_ERROR) {
12016 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012017 }
12018
12019 //IR
12020 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12021 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12022 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012023 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12024 uint8_t isIRon = 0;
12025
12026 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012027 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12028 LOGE("Invalid IR mode %d!", fwk_ir);
12029 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012030 if(isIRon != curr_ir_state )
12031 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12032
Thierry Strudel04e026f2016-10-10 11:27:36 -070012033 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12034 CAM_INTF_META_IR_MODE, fwk_ir)) {
12035 rc = BAD_VALUE;
12036 }
12037 }
12038 }
12039
Thierry Strudel54dc9782017-02-15 12:12:10 -080012040 //Binning Correction Mode
12041 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12042 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12043 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12044 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12045 || (0 > fwk_binning_correction)) {
12046 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12047 } else {
12048 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12049 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12050 rc = BAD_VALUE;
12051 }
12052 }
12053 }
12054
Thierry Strudel269c81a2016-10-12 12:13:59 -070012055 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12056 float aec_speed;
12057 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12058 LOGD("AEC Speed :%f", aec_speed);
12059 if ( aec_speed < 0 ) {
12060 LOGE("Invalid AEC mode %f!", aec_speed);
12061 } else {
12062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12063 aec_speed)) {
12064 rc = BAD_VALUE;
12065 }
12066 }
12067 }
12068
12069 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12070 float awb_speed;
12071 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12072 LOGD("AWB Speed :%f", awb_speed);
12073 if ( awb_speed < 0 ) {
12074 LOGE("Invalid AWB mode %f!", awb_speed);
12075 } else {
12076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12077 awb_speed)) {
12078 rc = BAD_VALUE;
12079 }
12080 }
12081 }
12082
Thierry Strudel3d639192016-09-09 11:52:26 -070012083 // TNR
12084 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12085 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12086 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012087 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012088 cam_denoise_param_t tnr;
12089 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12090 tnr.process_plates =
12091 (cam_denoise_process_type_t)frame_settings.find(
12092 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12093 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012094
12095 if(b_TnrRequested != curr_tnr_state)
12096 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12097
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12099 rc = BAD_VALUE;
12100 }
12101 }
12102
Thierry Strudel54dc9782017-02-15 12:12:10 -080012103 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012104 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012105 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12107 *exposure_metering_mode)) {
12108 rc = BAD_VALUE;
12109 }
12110 }
12111
Thierry Strudel3d639192016-09-09 11:52:26 -070012112 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12113 int32_t fwk_testPatternMode =
12114 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12115 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12116 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12117
12118 if (NAME_NOT_FOUND != testPatternMode) {
12119 cam_test_pattern_data_t testPatternData;
12120 memset(&testPatternData, 0, sizeof(testPatternData));
12121 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12122 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12123 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12124 int32_t *fwk_testPatternData =
12125 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12126 testPatternData.r = fwk_testPatternData[0];
12127 testPatternData.b = fwk_testPatternData[3];
12128 switch (gCamCapability[mCameraId]->color_arrangement) {
12129 case CAM_FILTER_ARRANGEMENT_RGGB:
12130 case CAM_FILTER_ARRANGEMENT_GRBG:
12131 testPatternData.gr = fwk_testPatternData[1];
12132 testPatternData.gb = fwk_testPatternData[2];
12133 break;
12134 case CAM_FILTER_ARRANGEMENT_GBRG:
12135 case CAM_FILTER_ARRANGEMENT_BGGR:
12136 testPatternData.gr = fwk_testPatternData[2];
12137 testPatternData.gb = fwk_testPatternData[1];
12138 break;
12139 default:
12140 LOGE("color arrangement %d is not supported",
12141 gCamCapability[mCameraId]->color_arrangement);
12142 break;
12143 }
12144 }
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12146 testPatternData)) {
12147 rc = BAD_VALUE;
12148 }
12149 } else {
12150 LOGE("Invalid framework sensor test pattern mode %d",
12151 fwk_testPatternMode);
12152 }
12153 }
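    /* Worked example of the CFA-dependent mapping above, assuming a client
     * requests a solid-color test pattern ("settings" is a client-side
     * android::CameraMetadata; 1023 is simply full scale for a 10-bit pipe):
     *
     *   int32_t solid[4] = {1023, 0, 0, 1023};   // read here as data[0] = R, data[3] = B
     *   settings.update(ANDROID_SENSOR_TEST_PATTERN_DATA, solid, 4);
     *   // RGGB / GRBG sensors: gr = solid[1], gb = solid[2]
     *   // GBRG / BGGR sensors: gr = solid[2], gb = solid[1]
     */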
12154
12155 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12156 size_t count = 0;
12157 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12158 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12159 gps_coords.data.d, gps_coords.count, count);
12160 if (gps_coords.count != count) {
12161 rc = BAD_VALUE;
12162 }
12163 }
12164
12165 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12166 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12167 size_t count = 0;
12168 const char *gps_methods_src = (const char *)
12169 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12170 memset(gps_methods, '\0', sizeof(gps_methods));
12171 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12172 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12173 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12174 if (GPS_PROCESSING_METHOD_SIZE != count) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12180 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12182 gps_timestamp)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12188 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12189 cam_rotation_info_t rotation_info;
12190 if (orientation == 0) {
12191 rotation_info.rotation = ROTATE_0;
12192 } else if (orientation == 90) {
12193 rotation_info.rotation = ROTATE_90;
12194 } else if (orientation == 180) {
12195 rotation_info.rotation = ROTATE_180;
12196 } else if (orientation == 270) {
12197 rotation_info.rotation = ROTATE_270;
12198 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012199 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012200 rotation_info.streamId = snapshotStreamId;
12201 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12203 rc = BAD_VALUE;
12204 }
12205 }
12206
12207 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12208 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12210 rc = BAD_VALUE;
12211 }
12212 }
12213
12214 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12215 uint32_t thumb_quality = (uint32_t)
12216 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12218 thumb_quality)) {
12219 rc = BAD_VALUE;
12220 }
12221 }
12222
12223 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12224 cam_dimension_t dim;
12225 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12226 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12228 rc = BAD_VALUE;
12229 }
12230 }
12231
12232 // Internal metadata
12233 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12234 size_t count = 0;
12235 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12236 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12237 privatedata.data.i32, privatedata.count, count);
12238 if (privatedata.count != count) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012243 // ISO/Exposure Priority
12244 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12245 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12246 cam_priority_mode_t mode =
12247 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12248 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12249 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12250 use_iso_exp_pty.previewOnly = FALSE;
12251 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12252 use_iso_exp_pty.value = *ptr;
12253
12254 if(CAM_ISO_PRIORITY == mode) {
12255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12256 use_iso_exp_pty)) {
12257 rc = BAD_VALUE;
12258 }
12259 }
12260 else {
12261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12262 use_iso_exp_pty)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012266
12267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12268 rc = BAD_VALUE;
12269 }
12270 }
12271 } else {
12272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12273 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012274 }
12275 }
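    /* Example of driving the priority path above ("settings" is a client-side
     * android::CameraMetadata; 800 is a hypothetical ISO value):
     *
     *   int32_t priority = CAM_ISO_PRIORITY;
     *   int64_t iso = 800;
     *   settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
     *   settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &iso, 1);
     *
     * With both tags present, the value is applied via CAM_INTF_PARM_ISO (or
     * CAM_INTF_PARM_EXPOSURE_TIME for CAM_EXP_PRIORITY) and ZSL is forced on;
     * when the tag pair is absent, ZSL is explicitly turned off above.
     */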
12276
12277 // Saturation
12278 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12279 int32_t* use_saturation =
12280 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
Thierry Strudel3d639192016-09-09 11:52:26 -070012286 // EV step
12287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12288 gCamCapability[mCameraId]->exp_compensation_step)) {
12289 rc = BAD_VALUE;
12290 }
12291
12292 // CDS info
12293 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12294 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12295 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12296
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12298 CAM_INTF_META_CDS_DATA, *cdsData)) {
12299 rc = BAD_VALUE;
12300 }
12301 }
12302
Shuzhen Wang19463d72016-03-08 11:09:52 -080012303 // Hybrid AE
12304 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12305 uint8_t *hybrid_ae = (uint8_t *)
12306 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12307
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12309 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12310 rc = BAD_VALUE;
12311 }
12312 }
12313
Thierry Strudel3d639192016-09-09 11:52:26 -070012314 return rc;
12315}
12316
12317/*===========================================================================
12318 * FUNCTION : captureResultCb
12319 *
12320 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12321 *
12322 * PARAMETERS :
12323 * @metadata : metadata (super buffer) from mm-camera-interface
12324 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12325 * @userdata: userdata
12326 *
12327 * RETURN : NONE
12328 *==========================================================================*/
12329void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12330 camera3_stream_buffer_t *buffer,
12331 uint32_t frame_number, bool isInputBuffer, void *userdata)
12332{
12333 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12334 if (hw == NULL) {
12335 LOGE("Invalid hw %p", hw);
12336 return;
12337 }
12338
12339 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12340 return;
12341}
12342
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012343/*===========================================================================
12344 * FUNCTION : setBufferErrorStatus
12345 *
12346 * DESCRIPTION: Callback handler for channels to report any buffer errors
12347 *
12348 * PARAMETERS :
12349 * @ch : Channel from which the buffer error is reported
12350 * @frame_number : frame number for which the buffer error is reported
12351 * @buffer_status : buffer error status
12352 * @userdata: userdata
12353 *
12354 * RETURN : NONE
12355 *==========================================================================*/
12356void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12357 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12358{
12359 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12360 if (hw == NULL) {
12361 LOGE("Invalid hw %p", hw);
12362 return;
12363 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012364
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012365 hw->setBufferErrorStatus(ch, frame_number, err);
12366 return;
12367}
12368
12369void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12370 uint32_t frameNumber, camera3_buffer_status_t err)
12371{
12372 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12373 pthread_mutex_lock(&mMutex);
12374
12375 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12376 if (req.frame_number != frameNumber)
12377 continue;
12378 for (auto& k : req.mPendingBufferList) {
12379 if(k.stream->priv == ch) {
12380 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12381 }
12382 }
12383 }
12384
12385 pthread_mutex_unlock(&mMutex);
12386 return;
12387}
Thierry Strudel3d639192016-09-09 11:52:26 -070012388/*===========================================================================
12389 * FUNCTION : initialize
12390 *
12391 * DESCRIPTION: Pass framework callback pointers to HAL
12392 *
12393 * PARAMETERS :
12394 * @device : camera3 device handle
12395 * @callback_ops : framework callback function table
12396 * RETURN : Success : 0
12397 * Failure: -ENODEV
12398 *==========================================================================*/
12399
12400int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12401 const camera3_callback_ops_t *callback_ops)
12402{
12403 LOGD("E");
12404 QCamera3HardwareInterface *hw =
12405 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12406 if (!hw) {
12407 LOGE("NULL camera device");
12408 return -ENODEV;
12409 }
12410
12411 int rc = hw->initialize(callback_ops);
12412 LOGD("X");
12413 return rc;
12414}
12415
12416/*===========================================================================
12417 * FUNCTION : configure_streams
12418 *
12419 * DESCRIPTION: Framework entry point to (re)configure the set of camera streams
12420 *
12421 * PARAMETERS :
12422 * @device : camera3 device handle
12423 * @stream_list : set of streams to be configured
12424 * RETURN : Success: 0
12425 * Failure: -EINVAL (if stream configuration is invalid)
12426 * -ENODEV (fatal error)
12427 *==========================================================================*/
12428
12429int QCamera3HardwareInterface::configure_streams(
12430 const struct camera3_device *device,
12431 camera3_stream_configuration_t *stream_list)
12432{
12433 LOGD("E");
12434 QCamera3HardwareInterface *hw =
12435 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12436 if (!hw) {
12437 LOGE("NULL camera device");
12438 return -ENODEV;
12439 }
12440 int rc = hw->configureStreams(stream_list);
12441 LOGD("X");
12442 return rc;
12443}
12444
12445/*===========================================================================
12446 * FUNCTION : construct_default_request_settings
12447 *
12448 * DESCRIPTION: Configure a settings buffer to meet the required use case
12449 *
12450 * PARAMETERS :
12451 * @device : camera3 device handle
12452 * @type : request template type (preview, still capture, video record, etc.)
12453 * RETURN : Success: Return valid metadata
12454 * Failure: Return NULL
12455 *==========================================================================*/
12456const camera_metadata_t* QCamera3HardwareInterface::
12457 construct_default_request_settings(const struct camera3_device *device,
12458 int type)
12459{
12460
12461 LOGD("E");
12462 camera_metadata_t* fwk_metadata = NULL;
12463 QCamera3HardwareInterface *hw =
12464 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12465 if (!hw) {
12466 LOGE("NULL camera device");
12467 return NULL;
12468 }
12469
12470 fwk_metadata = hw->translateCapabilityToMetadata(type);
12471
12472 LOGD("X");
12473 return fwk_metadata;
12474}
12475
12476/*===========================================================================
12477 * FUNCTION : process_capture_request
12478 *
12479 * DESCRIPTION: Framework entry point to queue a capture request for processing
12480 *
12481 * PARAMETERS :
12482 * @device : camera3 device handle
12483 * @request : capture request to be processed
12484 * RETURN : 0 on success, negative error code on failure
12485 *==========================================================================*/
12486int QCamera3HardwareInterface::process_capture_request(
12487 const struct camera3_device *device,
12488 camera3_capture_request_t *request)
12489{
12490 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012491 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012492 QCamera3HardwareInterface *hw =
12493 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12494 if (!hw) {
12495 LOGE("NULL camera device");
12496 return -EINVAL;
12497 }
12498
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012499 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012500 LOGD("X");
12501 return rc;
12502}
12503
12504/*===========================================================================
12505 * FUNCTION : dump
12506 *
12507 * DESCRIPTION: Dump HAL state for this camera device to the given file descriptor
12508 *
12509 * PARAMETERS :
12510 * @device : camera3 device handle
12511 * @fd : file descriptor to write the dump to (e.g. via dumpsys)
12512 * RETURN : None
12513 *==========================================================================*/
12514
12515void QCamera3HardwareInterface::dump(
12516 const struct camera3_device *device, int fd)
12517{
12518 /* Log level property is read when "adb shell dumpsys media.camera" is
12519 called so that the log level can be controlled without restarting
12520 the media server */
12521 getLogLevel();
12522
12523 LOGD("E");
12524 QCamera3HardwareInterface *hw =
12525 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12526 if (!hw) {
12527 LOGE("NULL camera device");
12528 return;
12529 }
12530
12531 hw->dump(fd);
12532 LOGD("X");
12533 return;
12534}
12535
12536/*===========================================================================
12537 * FUNCTION : flush
12538 *
12539 * DESCRIPTION: Framework entry point to flush all in-flight captures on this device
12540 *
12541 * PARAMETERS :
12542 * @device : camera3 device handle
12543 *
12544 * RETURN : 0 on success, -EINVAL on invalid device, -ENODEV on fatal error
12545 *==========================================================================*/
12546
12547int QCamera3HardwareInterface::flush(
12548 const struct camera3_device *device)
12549{
12550 int rc;
12551 LOGD("E");
12552 QCamera3HardwareInterface *hw =
12553 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12554 if (!hw) {
12555 LOGE("NULL camera device");
12556 return -EINVAL;
12557 }
12558
12559 pthread_mutex_lock(&hw->mMutex);
12560 // Validate current state
12561 switch (hw->mState) {
12562 case STARTED:
12563 /* valid state */
12564 break;
12565
12566 case ERROR:
12567 pthread_mutex_unlock(&hw->mMutex);
12568 hw->handleCameraDeviceError();
12569 return -ENODEV;
12570
12571 default:
12572 LOGI("Flush returned during state %d", hw->mState);
12573 pthread_mutex_unlock(&hw->mMutex);
12574 return 0;
12575 }
12576 pthread_mutex_unlock(&hw->mMutex);
12577
12578 rc = hw->flush(true /* restart channels */ );
12579 LOGD("X");
12580 return rc;
12581}
12582
12583/*===========================================================================
12584 * FUNCTION : close_camera_device
12585 *
12586 * DESCRIPTION: Close the camera device and release the HAL instance
12587 *
12588 * PARAMETERS :
12589 * @device : hw device handle of the camera to be closed
12590 *
12591 * RETURN : NO_ERROR on success, BAD_VALUE on invalid device
12592 *==========================================================================*/
12593int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12594{
12595 int ret = NO_ERROR;
12596 QCamera3HardwareInterface *hw =
12597 reinterpret_cast<QCamera3HardwareInterface *>(
12598 reinterpret_cast<camera3_device_t *>(device)->priv);
12599 if (!hw) {
12600 LOGE("NULL camera device");
12601 return BAD_VALUE;
12602 }
12603
12604 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12605 delete hw;
12606 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012607 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012608 return ret;
12609}
12610
12611/*===========================================================================
12612 * FUNCTION : getWaveletDenoiseProcessPlate
12613 *
12614 * DESCRIPTION: query wavelet denoise process plate
12615 *
12616 * PARAMETERS : None
12617 *
12618 * RETURN : WNR prcocess plate value
12619 *==========================================================================*/
12620cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12621{
12622 char prop[PROPERTY_VALUE_MAX];
12623 memset(prop, 0, sizeof(prop));
12624 property_get("persist.denoise.process.plates", prop, "0");
12625 int processPlate = atoi(prop);
12626 switch(processPlate) {
12627 case 0:
12628 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12629 case 1:
12630 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12631 case 2:
12632 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12633 case 3:
12634 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12635 default:
12636 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12637 }
12638}
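/* Example: persist.denoise.process.plates=2 selects
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR, and any unrecognized value falls back
 * to the same streamlined YCbCr plate. getTemporalDenoiseProcessPlate() below
 * applies the identical mapping to persist.tnr.process.plates.
 */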
12639
12640
12641/*===========================================================================
12642 * FUNCTION : getTemporalDenoiseProcessPlate
12643 *
12644 * DESCRIPTION: query temporal denoise process plate
12645 *
12646 * PARAMETERS : None
12647 *
12648 * RETURN : TNR prcocess plate value
12649 *==========================================================================*/
12650cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12651{
12652 char prop[PROPERTY_VALUE_MAX];
12653 memset(prop, 0, sizeof(prop));
12654 property_get("persist.tnr.process.plates", prop, "0");
12655 int processPlate = atoi(prop);
12656 switch(processPlate) {
12657 case 0:
12658 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12659 case 1:
12660 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12661 case 2:
12662 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12663 case 3:
12664 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12665 default:
12666 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12667 }
12668}
12669
12670
12671/*===========================================================================
12672 * FUNCTION : extractSceneMode
12673 *
12674 * DESCRIPTION: Extract scene mode from frameworks set metadata
12675 *
12676 * PARAMETERS :
12677 * @frame_settings: CameraMetadata reference
12678 * @metaMode: ANDROID_CONTORL_MODE
12679 * @hal_metadata: hal metadata structure
12680 *
12681 * RETURN : None
12682 *==========================================================================*/
12683int32_t QCamera3HardwareInterface::extractSceneMode(
12684 const CameraMetadata &frame_settings, uint8_t metaMode,
12685 metadata_buffer_t *hal_metadata)
12686{
12687 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012688 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12689
12690 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12691 LOGD("Ignoring control mode OFF_KEEP_STATE");
12692 return NO_ERROR;
12693 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012694
12695 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12696 camera_metadata_ro_entry entry =
12697 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12698 if (0 == entry.count)
12699 return rc;
12700
12701 uint8_t fwk_sceneMode = entry.data.u8[0];
12702
12703 int val = lookupHalName(SCENE_MODES_MAP,
12704 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12705 fwk_sceneMode);
12706 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012707 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012708 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012709 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012710 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012711
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012712 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12713 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12714 }
12715
12716 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12717 if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012718 cam_hdr_param_t hdr_params;
12719 hdr_params.hdr_enable = 1;
12720 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12721 hdr_params.hdr_need_1x = false;
12722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12723 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012727
Thierry Strudel3d639192016-09-09 11:52:26 -070012728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12729 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012733
12734 if (mForceHdrSnapshot) {
12735 cam_hdr_param_t hdr_params;
12736 hdr_params.hdr_enable = 1;
12737 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12738 hdr_params.hdr_need_1x = false;
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12740 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12741 rc = BAD_VALUE;
12742 }
12743 }
12744
Thierry Strudel3d639192016-09-09 11:52:26 -070012745 return rc;
12746}
12747
12748/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012749 * FUNCTION : setVideoHdrMode
12750 *
12751 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12752 *
12753 * PARAMETERS :
12754 * @hal_metadata: hal metadata structure
12755 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE value)
12756 *
12757 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
12758 *==========================================================================*/
12759int32_t QCamera3HardwareInterface::setVideoHdrMode(
12760 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12761{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012762 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12763 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12764 }
12765
12766 LOGE("Invalid Video HDR mode %d!", vhdr);
12767 return BAD_VALUE;
12768}
12769
12770/*===========================================================================
12771 * FUNCTION : setSensorHDR
12772 *
12773 * DESCRIPTION: Enable/disable sensor HDR.
12774 *
12775 * PARAMETERS :
12776 * @hal_metadata: hal metadata structure
12777 * @enable: boolean whether to enable/disable sensor HDR
12778 * @isVideoHdrEnable: true when invoked for video HDR (rather than scene-mode HDR)
12779 * RETURN : NO_ERROR on success, BAD_VALUE on failure
12780 *==========================================================================*/
12781int32_t QCamera3HardwareInterface::setSensorHDR(
12782 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12783{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012784 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012785 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12786
12787 if (enable) {
12788 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12789 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12790 #ifdef _LE_CAMERA_
12791 //Default to staggered HDR for IOT
12792 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12793 #else
12794 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12795 #endif
12796 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12797 }
12798
12799 bool isSupported = false;
12800 switch (sensor_hdr) {
12801 case CAM_SENSOR_HDR_IN_SENSOR:
12802 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12803 CAM_QCOM_FEATURE_SENSOR_HDR) {
12804 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012805 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012806 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012807 break;
12808 case CAM_SENSOR_HDR_ZIGZAG:
12809 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12810 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12811 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012812 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012813 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012814 break;
12815 case CAM_SENSOR_HDR_STAGGERED:
12816 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12817 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12818 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012819 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012820 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012821 break;
12822 case CAM_SENSOR_HDR_OFF:
12823 isSupported = true;
12824 LOGD("Turning off sensor HDR");
12825 break;
12826 default:
12827 LOGE("HDR mode %d not supported", sensor_hdr);
12828 rc = BAD_VALUE;
12829 break;
12830 }
12831
12832 if(isSupported) {
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12834 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12835 rc = BAD_VALUE;
12836 } else {
12837 if(!isVideoHdrEnable)
12838 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012839 }
12840 }
12841 return rc;
12842}
12843
12844/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012845 * FUNCTION : needRotationReprocess
12846 *
12847 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12848 *
12849 * PARAMETERS : none
12850 *
12851 * RETURN : true: needed
12852 * false: no need
12853 *==========================================================================*/
12854bool QCamera3HardwareInterface::needRotationReprocess()
12855{
12856 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12857 // current rotation is not zero, and pp has the capability to process rotation
12858 LOGH("need do reprocess for rotation");
12859 return true;
12860 }
12861
12862 return false;
12863}
12864
12865/*===========================================================================
12866 * FUNCTION : needReprocess
12867 *
12868 * DESCRIPTION: if reprocess is needed
12869 *
12870 * PARAMETERS : none
12871 *
12872 * RETURN : true: needed
12873 * false: no need
12874 *==========================================================================*/
12875bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12876{
12877 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12878 // TODO: add for ZSL HDR later
12879 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12880 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12881 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12882 return true;
12883 } else {
12884 LOGH("already post processed frame");
12885 return false;
12886 }
12887 }
12888 return needRotationReprocess();
12889}
12890
12891/*===========================================================================
12892 * FUNCTION : needJpegExifRotation
12893 *
12894 * DESCRIPTION: if rotation from jpeg is needed
12895 *
12896 * PARAMETERS : none
12897 *
12898 * RETURN : true: needed
12899 * false: no need
12900 *==========================================================================*/
12901bool QCamera3HardwareInterface::needJpegExifRotation()
12902{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012903 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012904 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12905 LOGD("Need use Jpeg EXIF Rotation");
12906 return true;
12907 }
12908 return false;
12909}
12910
12911/*===========================================================================
12912 * FUNCTION : addOfflineReprocChannel
12913 *
12914 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12915 * coming from input channel
12916 *
12917 * PARAMETERS :
12918 * @config : reprocess configuration
12919 * @inputChHandle : pointer to the input (source) channel
12920 *
12921 *
12922 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12923 *==========================================================================*/
12924QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12925 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12926{
12927 int32_t rc = NO_ERROR;
12928 QCamera3ReprocessChannel *pChannel = NULL;
12929
12930 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012931 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12932 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012933 if (NULL == pChannel) {
12934 LOGE("no mem for reprocess channel");
12935 return NULL;
12936 }
12937
12938 rc = pChannel->initialize(IS_TYPE_NONE);
12939 if (rc != NO_ERROR) {
12940 LOGE("init reprocess channel failed, ret = %d", rc);
12941 delete pChannel;
12942 return NULL;
12943 }
12944
12945 // pp feature config
12946 cam_pp_feature_config_t pp_config;
12947 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12948
12949 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12950 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12951 & CAM_QCOM_FEATURE_DSDN) {
12952 // Use CPP CDS in case h/w supports it.
12953 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12954 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12955 }
12956 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12957 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12958 }
12959
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012960 if (config.hdr_param.hdr_enable) {
12961 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12962 pp_config.hdr_param = config.hdr_param;
12963 }
12964
12965 if (mForceHdrSnapshot) {
12966 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12967 pp_config.hdr_param.hdr_enable = 1;
12968 pp_config.hdr_param.hdr_need_1x = 0;
12969 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12970 }
12971
Thierry Strudel3d639192016-09-09 11:52:26 -070012972 rc = pChannel->addReprocStreamsFromSource(pp_config,
12973 config,
12974 IS_TYPE_NONE,
12975 mMetadataChannel);
12976
12977 if (rc != NO_ERROR) {
12978 delete pChannel;
12979 return NULL;
12980 }
12981 return pChannel;
12982}
12983
12984/*===========================================================================
12985 * FUNCTION : getMobicatMask
12986 *
12987 * DESCRIPTION: returns mobicat mask
12988 *
12989 * PARAMETERS : none
12990 *
12991 * RETURN : mobicat mask
12992 *
12993 *==========================================================================*/
12994uint8_t QCamera3HardwareInterface::getMobicatMask()
12995{
12996 return m_MobicatMask;
12997}
12998
12999/*===========================================================================
13000 * FUNCTION : setMobicat
13001 *
13002 * DESCRIPTION: set Mobicat on/off.
13003 *
13004 * PARAMETERS :
13005 * @params : none
13006 *
13007 * RETURN : int32_t type of status
13008 * NO_ERROR -- success
13009 * non-zero failure code
13010 *==========================================================================*/
13011int32_t QCamera3HardwareInterface::setMobicat()
13012{
13013 char value [PROPERTY_VALUE_MAX];
13014 property_get("persist.camera.mobicat", value, "0");
13015 int32_t ret = NO_ERROR;
13016 uint8_t enableMobi = (uint8_t)atoi(value);
13017
13018 if (enableMobi) {
13019 tune_cmd_t tune_cmd;
13020 tune_cmd.type = SET_RELOAD_CHROMATIX;
13021 tune_cmd.module = MODULE_ALL;
13022 tune_cmd.value = TRUE;
13023 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13024 CAM_INTF_PARM_SET_VFE_COMMAND,
13025 tune_cmd);
13026
13027 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13028 CAM_INTF_PARM_SET_PP_COMMAND,
13029 tune_cmd);
13030 }
13031 m_MobicatMask = enableMobi;
13032
13033 return ret;
13034}
13035
13036/*===========================================================================
13037* FUNCTION : getLogLevel
13038*
13039* DESCRIPTION: Reads the log level property into a variable
13040*
13041* PARAMETERS :
13042* None
13043*
13044* RETURN :
13045* None
13046*==========================================================================*/
13047void QCamera3HardwareInterface::getLogLevel()
13048{
13049 char prop[PROPERTY_VALUE_MAX];
13050 uint32_t globalLogLevel = 0;
13051
13052 property_get("persist.camera.hal.debug", prop, "0");
13053 int val = atoi(prop);
13054 if (0 <= val) {
13055 gCamHal3LogLevel = (uint32_t)val;
13056 }
13057
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013058 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013059 gKpiDebugLevel = atoi(prop);
13060
13061 property_get("persist.camera.global.debug", prop, "0");
13062 val = atoi(prop);
13063 if (0 <= val) {
13064 globalLogLevel = (uint32_t)val;
13065 }
13066
13067 /* Highest log level among hal.logs and global.logs is selected */
13068 if (gCamHal3LogLevel < globalLogLevel)
13069 gCamHal3LogLevel = globalLogLevel;
13070
13071 return;
13072}
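/* Example: with persist.camera.hal.debug=1 and persist.camera.global.debug=3,
 * the effective gCamHal3LogLevel becomes 3 (the higher of the two); negative
 * property values are ignored and leave the current level unchanged.
 */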
13073
13074/*===========================================================================
13075 * FUNCTION : validateStreamRotations
13076 *
13077 * DESCRIPTION: Check if the rotations requested are supported
13078 *
13079 * PARAMETERS :
13080 * @stream_list : streams to be configured
13081 *
13082 * RETURN : NO_ERROR on success
13083 * -EINVAL on failure
13084 *
13085 *==========================================================================*/
13086int QCamera3HardwareInterface::validateStreamRotations(
13087 camera3_stream_configuration_t *streamList)
13088{
13089 int rc = NO_ERROR;
13090
13091 /*
13092 * Loop through all streams requested in configuration
13093 * Check if unsupported rotations have been requested on any of them
13094 */
13095 for (size_t j = 0; j < streamList->num_streams; j++){
13096 camera3_stream_t *newStream = streamList->streams[j];
13097
13098 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13099 bool isImplDef = (newStream->format ==
13100 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13101 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13102 isImplDef);
13103
13104 if (isRotated && (!isImplDef || isZsl)) {
13105 LOGE("Error: Unsupported rotation of %d requested for stream"
13106 "type:%d and stream format:%d",
13107 newStream->rotation, newStream->stream_type,
13108 newStream->format);
13109 rc = -EINVAL;
13110 break;
13111 }
13112 }
13113
13114 return rc;
13115}
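/* Example: a 90-degree rotation on a plain IMPLEMENTATION_DEFINED output
 * stream passes this check, while the same rotation on a BLOB (JPEG) stream
 * or on a bidirectional (ZSL) implementation-defined stream fails with
 * -EINVAL, per the isRotated/isImplDef/isZsl conditions above.
 */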
13116
13117/*===========================================================================
13118* FUNCTION : getFlashInfo
13119*
13120* DESCRIPTION: Retrieve information about whether the device has a flash.
13121*
13122* PARAMETERS :
13123* @cameraId : Camera id to query
13124* @hasFlash : Boolean indicating whether there is a flash device
13125* associated with given camera
13126* @flashNode : If a flash device exists, this will be its device node.
13127*
13128* RETURN :
13129* None
13130*==========================================================================*/
13131void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13132 bool& hasFlash,
13133 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13134{
13135 cam_capability_t* camCapability = gCamCapability[cameraId];
13136 if (NULL == camCapability) {
13137 hasFlash = false;
13138 flashNode[0] = '\0';
13139 } else {
13140 hasFlash = camCapability->flash_available;
13141 strlcpy(flashNode,
13142 (char*)camCapability->flash_dev_name,
13143 QCAMERA_MAX_FILEPATH_LENGTH);
13144 }
13145}
13146
13147/*===========================================================================
13148* FUNCTION : getEepromVersionInfo
13149*
13150* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13151*
13152* PARAMETERS : None
13153*
13154* RETURN : string describing EEPROM version
13155* "\0" if no such info available
13156*==========================================================================*/
13157const char *QCamera3HardwareInterface::getEepromVersionInfo()
13158{
13159 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13160}
13161
13162/*===========================================================================
13163* FUNCTION : getLdafCalib
13164*
13165* DESCRIPTION: Retrieve Laser AF calibration data
13166*
13167* PARAMETERS : None
13168*
13169* RETURN : Pointer to two uint32_t values holding laser AF calibration data
13170* NULL if none is available.
13171*==========================================================================*/
13172const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13173{
13174 if (mLdafCalibExist) {
13175 return &mLdafCalib[0];
13176 } else {
13177 return NULL;
13178 }
13179}
13180
13181/*===========================================================================
13182 * FUNCTION : dynamicUpdateMetaStreamInfo
13183 *
13184 * DESCRIPTION: This function:
13185 * (1) stops all the channels
13186 * (2) returns error on pending requests and buffers
13187 * (3) sends metastream_info in setparams
13188 * (4) starts all channels
13189 * This is useful when sensor has to be restarted to apply any
13190 * settings such as frame rate from a different sensor mode
13191 *
13192 * PARAMETERS : None
13193 *
13194 * RETURN : NO_ERROR on success
13195 * Error codes on failure
13196 *
13197 *==========================================================================*/
13198int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13199{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013200 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013201 int rc = NO_ERROR;
13202
13203 LOGD("E");
13204
13205 rc = stopAllChannels();
13206 if (rc < 0) {
13207 LOGE("stopAllChannels failed");
13208 return rc;
13209 }
13210
13211 rc = notifyErrorForPendingRequests();
13212 if (rc < 0) {
13213 LOGE("notifyErrorForPendingRequests failed");
13214 return rc;
13215 }
13216
13217 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13218 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13219 "Format:%d",
13220 mStreamConfigInfo.type[i],
13221 mStreamConfigInfo.stream_sizes[i].width,
13222 mStreamConfigInfo.stream_sizes[i].height,
13223 mStreamConfigInfo.postprocess_mask[i],
13224 mStreamConfigInfo.format[i]);
13225 }
13226
13227 /* Send meta stream info once again so that ISP can start */
13228 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13229 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13230 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13231 mParameters);
13232 if (rc < 0) {
13233 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13234 }
13235
13236 rc = startAllChannels();
13237 if (rc < 0) {
13238 LOGE("startAllChannels failed");
13239 return rc;
13240 }
13241
13242 LOGD("X");
13243 return rc;
13244}
13245
13246/*===========================================================================
13247 * FUNCTION : stopAllChannels
13248 *
13249 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13250 *
13251 * PARAMETERS : None
13252 *
13253 * RETURN : NO_ERROR on success
13254 * Error codes on failure
13255 *
13256 *==========================================================================*/
13257int32_t QCamera3HardwareInterface::stopAllChannels()
13258{
13259 int32_t rc = NO_ERROR;
13260
13261 LOGD("Stopping all channels");
13262 // Stop the Streams/Channels
13263 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13264 it != mStreamInfo.end(); it++) {
13265 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13266 if (channel) {
13267 channel->stop();
13268 }
13269 (*it)->status = INVALID;
13270 }
13271
13272 if (mSupportChannel) {
13273 mSupportChannel->stop();
13274 }
13275 if (mAnalysisChannel) {
13276 mAnalysisChannel->stop();
13277 }
13278 if (mRawDumpChannel) {
13279 mRawDumpChannel->stop();
13280 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013281 if (mHdrPlusRawSrcChannel) {
13282 mHdrPlusRawSrcChannel->stop();
13283 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013284 if (mMetadataChannel) {
13285 /* If content of mStreamInfo is not 0, there is metadata stream */
13286 mMetadataChannel->stop();
13287 }
13288
13289 LOGD("All channels stopped");
13290 return rc;
13291}
13292
13293/*===========================================================================
13294 * FUNCTION : startAllChannels
13295 *
13296 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13297 *
13298 * PARAMETERS : None
13299 *
13300 * RETURN : NO_ERROR on success
13301 * Error codes on failure
13302 *
13303 *==========================================================================*/
13304int32_t QCamera3HardwareInterface::startAllChannels()
13305{
13306 int32_t rc = NO_ERROR;
13307
13308 LOGD("Start all channels ");
13309 // Start the Streams/Channels
13310 if (mMetadataChannel) {
13311 /* If content of mStreamInfo is not 0, there is metadata stream */
13312 rc = mMetadataChannel->start();
13313 if (rc < 0) {
13314 LOGE("META channel start failed");
13315 return rc;
13316 }
13317 }
13318 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13319 it != mStreamInfo.end(); it++) {
13320 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13321 if (channel) {
13322 rc = channel->start();
13323 if (rc < 0) {
13324 LOGE("channel start failed");
13325 return rc;
13326 }
13327 }
13328 }
13329 if (mAnalysisChannel) {
13330 mAnalysisChannel->start();
13331 }
13332 if (mSupportChannel) {
13333 rc = mSupportChannel->start();
13334 if (rc < 0) {
13335 LOGE("Support channel start failed");
13336 return rc;
13337 }
13338 }
13339 if (mRawDumpChannel) {
13340 rc = mRawDumpChannel->start();
13341 if (rc < 0) {
13342 LOGE("RAW dump channel start failed");
13343 return rc;
13344 }
13345 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013346 if (mHdrPlusRawSrcChannel) {
13347 rc = mHdrPlusRawSrcChannel->start();
13348 if (rc < 0) {
13349 LOGE("HDR+ RAW channel start failed");
13350 return rc;
13351 }
13352 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013353
13354 LOGD("All channels started");
13355 return rc;
13356}
13357
13358/*===========================================================================
13359 * FUNCTION : notifyErrorForPendingRequests
13360 *
13361 * DESCRIPTION: This function sends error for all the pending requests/buffers
13362 *
13363 * PARAMETERS : None
13364 *
13365 * RETURN : Error codes
13366 * NO_ERROR on success
13367 *
13368 *==========================================================================*/
13369int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13370{
13371 int32_t rc = NO_ERROR;
13372 unsigned int frameNum = 0;
13373 camera3_capture_result_t result;
13374 camera3_stream_buffer_t *pStream_Buf = NULL;
13375
13376 memset(&result, 0, sizeof(camera3_capture_result_t));
13377
13378 if (mPendingRequestsList.size() > 0) {
13379 pendingRequestIterator i = mPendingRequestsList.begin();
13380 frameNum = i->frame_number;
13381 } else {
13382 /* There might still be pending buffers even though there are
13383 no pending requests. Setting the frameNum to MAX so that
13384 all the buffers with smaller frame numbers are returned */
13385 frameNum = UINT_MAX;
13386 }
13387
13388 LOGH("Oldest frame num on mPendingRequestsList = %u",
13389 frameNum);
13390
Emilian Peev7650c122017-01-19 08:24:33 -080013391 notifyErrorFoPendingDepthData(mDepthChannel);
13392
Thierry Strudel3d639192016-09-09 11:52:26 -070013393 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13394 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13395
13396 if (req->frame_number < frameNum) {
13397 // Send Error notify to frameworks for each buffer for which
13398 // metadata buffer is already sent
13399 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13400 req->frame_number, req->mPendingBufferList.size());
13401
13402 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13403 if (NULL == pStream_Buf) {
13404 LOGE("No memory for pending buffers array");
13405 return NO_MEMORY;
13406 }
13407 memset(pStream_Buf, 0,
13408 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13409 result.result = NULL;
13410 result.frame_number = req->frame_number;
13411 result.num_output_buffers = req->mPendingBufferList.size();
13412 result.output_buffers = pStream_Buf;
13413
13414 size_t index = 0;
13415 for (auto info = req->mPendingBufferList.begin();
13416 info != req->mPendingBufferList.end(); ) {
13417
13418 camera3_notify_msg_t notify_msg;
13419 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13420 notify_msg.type = CAMERA3_MSG_ERROR;
13421 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13422 notify_msg.message.error.error_stream = info->stream;
13423 notify_msg.message.error.frame_number = req->frame_number;
13424 pStream_Buf[index].acquire_fence = -1;
13425 pStream_Buf[index].release_fence = -1;
13426 pStream_Buf[index].buffer = info->buffer;
13427 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13428 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013429 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013430 index++;
13431 // Remove buffer from list
13432 info = req->mPendingBufferList.erase(info);
13433 }
13434
13435 // Remove this request from Map
13436 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13437 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13438 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13439
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013440 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013441
13442 delete [] pStream_Buf;
13443 } else {
13444
13445 // Go through the pending requests info and send error request to framework
13446 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13447
13448 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13449
13450 // Send error notify to frameworks
13451 camera3_notify_msg_t notify_msg;
13452 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13453 notify_msg.type = CAMERA3_MSG_ERROR;
13454 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13455 notify_msg.message.error.error_stream = NULL;
13456 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013457 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013458
13459 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13460 if (NULL == pStream_Buf) {
13461 LOGE("No memory for pending buffers array");
13462 return NO_MEMORY;
13463 }
13464 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13465
13466 result.result = NULL;
13467 result.frame_number = req->frame_number;
13468 result.input_buffer = i->input_buffer;
13469 result.num_output_buffers = req->mPendingBufferList.size();
13470 result.output_buffers = pStream_Buf;
13471
13472 size_t index = 0;
13473 for (auto info = req->mPendingBufferList.begin();
13474 info != req->mPendingBufferList.end(); ) {
13475 pStream_Buf[index].acquire_fence = -1;
13476 pStream_Buf[index].release_fence = -1;
13477 pStream_Buf[index].buffer = info->buffer;
13478 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13479 pStream_Buf[index].stream = info->stream;
13480 index++;
13481 // Remove buffer from list
13482 info = req->mPendingBufferList.erase(info);
13483 }
13484
13485 // Remove this request from Map
13486             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13487                     req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13488 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13489
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013490 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013491 delete [] pStream_Buf;
13492 i = erasePendingRequest(i);
13493 }
13494 }
13495
13496 /* Reset pending frame Drop list and requests list */
13497 mPendingFrameDropList.clear();
13498
13499 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13500 req.mPendingBufferList.clear();
13501 }
13502 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013503 LOGH("Cleared all the pending buffers ");
13504
13505 return rc;
13506}
13507
13508bool QCamera3HardwareInterface::isOnEncoder(
13509 const cam_dimension_t max_viewfinder_size,
13510 uint32_t width, uint32_t height)
13511{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013512 return ((width > (uint32_t)max_viewfinder_size.width) ||
13513 (height > (uint32_t)max_viewfinder_size.height) ||
13514 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13515 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013516}
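/*
 * Illustrative sketch (hypothetical values, not part of the HAL): with a max
 * viewfinder size of 1920x1080, a 4032x3024 snapshot stream exceeds both
 * dimensions and is treated as an encoder stream, while 1280x720 is not:
 *
 *   cam_dimension_t maxVf;
 *   maxVf.width = 1920;
 *   maxVf.height = 1080;
 *   isOnEncoder(maxVf, 4032, 3024);   // true
 *   isOnEncoder(maxVf, 1280, 720);    // false
 */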
13517
13518/*===========================================================================
13519 * FUNCTION : setBundleInfo
13520 *
13521  * DESCRIPTION: Set bundle info for all streams that are bundled.
13522 *
13523 * PARAMETERS : None
13524 *
13525 * RETURN : NO_ERROR on success
13526 * Error codes on failure
13527 *==========================================================================*/
13528int32_t QCamera3HardwareInterface::setBundleInfo()
13529{
13530 int32_t rc = NO_ERROR;
13531
13532 if (mChannelHandle) {
13533 cam_bundle_config_t bundleInfo;
13534 memset(&bundleInfo, 0, sizeof(bundleInfo));
13535 rc = mCameraHandle->ops->get_bundle_info(
13536 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13537 if (rc != NO_ERROR) {
13538 LOGE("get_bundle_info failed");
13539 return rc;
13540 }
13541 if (mAnalysisChannel) {
13542 mAnalysisChannel->setBundleInfo(bundleInfo);
13543 }
13544 if (mSupportChannel) {
13545 mSupportChannel->setBundleInfo(bundleInfo);
13546 }
13547 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13548 it != mStreamInfo.end(); it++) {
13549 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13550 channel->setBundleInfo(bundleInfo);
13551 }
13552 if (mRawDumpChannel) {
13553 mRawDumpChannel->setBundleInfo(bundleInfo);
13554 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013555 if (mHdrPlusRawSrcChannel) {
13556 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13557 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013558 }
13559
13560 return rc;
13561}
13562
13563/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013564 * FUNCTION : setInstantAEC
13565 *
13566 * DESCRIPTION: Set Instant AEC related params.
13567 *
13568 * PARAMETERS :
13569 * @meta: CameraMetadata reference
13570 *
13571 * RETURN : NO_ERROR on success
13572 * Error codes on failure
13573 *==========================================================================*/
13574int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13575{
13576 int32_t rc = NO_ERROR;
13577 uint8_t val = 0;
13578 char prop[PROPERTY_VALUE_MAX];
13579
13580 // First try to configure instant AEC from framework metadata
13581 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13582 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13583 }
13584
13585 // If framework did not set this value, try to read from set prop.
13586 if (val == 0) {
13587 memset(prop, 0, sizeof(prop));
13588 property_get("persist.camera.instant.aec", prop, "0");
13589 val = (uint8_t)atoi(prop);
13590 }
13591
13592 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13593 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13594 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13595 mInstantAEC = val;
13596 mInstantAECSettledFrameNumber = 0;
13597 mInstantAecFrameIdxCount = 0;
13598 LOGH("instantAEC value set %d",val);
13599 if (mInstantAEC) {
13600 memset(prop, 0, sizeof(prop));
13601 property_get("persist.camera.ae.instant.bound", prop, "10");
13602 int32_t aec_frame_skip_cnt = atoi(prop);
13603 if (aec_frame_skip_cnt >= 0) {
13604 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13605 } else {
13606 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13607 rc = BAD_VALUE;
13608 }
13609 }
13610 } else {
13611 LOGE("Bad instant aec value set %d", val);
13612 rc = BAD_VALUE;
13613 }
13614 return rc;
13615}
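/*
 * Illustrative note: if the framework does not send QCAMERA3_INSTANT_AEC_MODE,
 * the same behavior can be forced for bring-up/debugging through the properties
 * read above (values follow the cam_aec_convergence_type enum), e.g.:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10
 */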
13616
13617/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013618 * FUNCTION : get_num_overall_buffers
13619 *
13620  * DESCRIPTION: Get the total number of pending buffers across all requests.
13621 *
13622 * PARAMETERS : None
13623 *
13624 * RETURN : Number of overall pending buffers
13625 *
13626 *==========================================================================*/
13627uint32_t PendingBuffersMap::get_num_overall_buffers()
13628{
13629 uint32_t sum_buffers = 0;
13630 for (auto &req : mPendingBuffersInRequest) {
13631 sum_buffers += req.mPendingBufferList.size();
13632 }
13633 return sum_buffers;
13634}
13635
13636/*===========================================================================
13637 * FUNCTION : removeBuf
13638 *
13639  * DESCRIPTION: Remove a matching buffer from the tracker.
13640 *
13641 * PARAMETERS : @buffer: image buffer for the callback
13642 *
13643 * RETURN : None
13644 *
13645 *==========================================================================*/
13646void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13647{
13648 bool buffer_found = false;
13649 for (auto req = mPendingBuffersInRequest.begin();
13650 req != mPendingBuffersInRequest.end(); req++) {
13651 for (auto k = req->mPendingBufferList.begin();
13652 k != req->mPendingBufferList.end(); k++ ) {
13653 if (k->buffer == buffer) {
13654                 LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
13655 req->frame_number, buffer);
13656 k = req->mPendingBufferList.erase(k);
13657 if (req->mPendingBufferList.empty()) {
13658 // Remove this request from Map
13659 req = mPendingBuffersInRequest.erase(req);
13660 }
13661 buffer_found = true;
13662 break;
13663 }
13664 }
13665 if (buffer_found) {
13666 break;
13667 }
13668 }
13669 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13670 get_num_overall_buffers());
13671}
13672
13673/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013674 * FUNCTION : getBufErrStatus
13675 *
13676 * DESCRIPTION: get buffer error status
13677 *
13678 * PARAMETERS : @buffer: buffer handle
13679 *
13680 * RETURN : Error status
13681 *
13682 *==========================================================================*/
13683int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13684{
13685 for (auto& req : mPendingBuffersInRequest) {
13686 for (auto& k : req.mPendingBufferList) {
13687 if (k.buffer == buffer)
13688 return k.bufStatus;
13689 }
13690 }
13691 return CAMERA3_BUFFER_STATUS_OK;
13692}
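/*
 * Illustrative sketch (hypothetical call site, names assumed): the per-buffer
 * status tracked here is what a result path would propagate into the
 * camera3_stream_buffer_t handed back to the framework:
 *
 *   camera3_stream_buffer_t out = {};
 *   out.buffer = buffer;
 *   out.status = (mPendingBuffersMap.getBufErrStatus(buffer) ==
 *           CAMERA3_BUFFER_STATUS_ERROR) ?
 *           CAMERA3_BUFFER_STATUS_ERROR : CAMERA3_BUFFER_STATUS_OK;
 */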
13693
13694/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013695 * FUNCTION : setPAAFSupport
13696 *
13697 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13698 * feature mask according to stream type and filter
13699 * arrangement
13700 *
13701 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13702 * @stream_type: stream type
13703 * @filter_arrangement: filter arrangement
13704 *
13705 * RETURN : None
13706 *==========================================================================*/
13707void QCamera3HardwareInterface::setPAAFSupport(
13708 cam_feature_mask_t& feature_mask,
13709 cam_stream_type_t stream_type,
13710 cam_color_filter_arrangement_t filter_arrangement)
13711{
Thierry Strudel3d639192016-09-09 11:52:26 -070013712 switch (filter_arrangement) {
13713 case CAM_FILTER_ARRANGEMENT_RGGB:
13714 case CAM_FILTER_ARRANGEMENT_GRBG:
13715 case CAM_FILTER_ARRANGEMENT_GBRG:
13716 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013717 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13718 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013719 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013720 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13721 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070013722 }
13723 break;
13724 case CAM_FILTER_ARRANGEMENT_Y:
13725 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13726 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13727 }
13728 break;
13729 default:
13730 break;
13731 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070013732 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13733 feature_mask, stream_type, filter_arrangement);
13734
13735
Thierry Strudel3d639192016-09-09 11:52:26 -070013736}
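/*
 * Illustrative sketch (hypothetical call site): for a Bayer preview stream
 * without the QTI EIS post-processing feature, PAAF is enabled in the mask:
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now includes CAM_QCOM_FEATURE_PAAF
 */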
13737
13738/*===========================================================================
13739* FUNCTION : getSensorMountAngle
13740*
13741* DESCRIPTION: Retrieve sensor mount angle
13742*
13743* PARAMETERS : None
13744*
13745* RETURN : sensor mount angle in uint32_t
13746*==========================================================================*/
13747uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13748{
13749 return gCamCapability[mCameraId]->sensor_mount_angle;
13750}
13751
13752/*===========================================================================
13753* FUNCTION : getRelatedCalibrationData
13754*
13755* DESCRIPTION: Retrieve related system calibration data
13756*
13757* PARAMETERS : None
13758*
13759* RETURN : Pointer to related system calibration data
13760*==========================================================================*/
13761const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13762{
13763 return (const cam_related_system_calibration_data_t *)
13764 &(gCamCapability[mCameraId]->related_cam_calibration);
13765}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013766
13767/*===========================================================================
13768 * FUNCTION : is60HzZone
13769 *
13770 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
13771 *
13772 * PARAMETERS : None
13773 *
13774 * RETURN : True if in 60Hz zone, False otherwise
13775 *==========================================================================*/
13776bool QCamera3HardwareInterface::is60HzZone()
13777{
13778 time_t t = time(NULL);
13779 struct tm lt;
13780
13781 struct tm* r = localtime_r(&t, &lt);
13782
13783 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13784 return true;
13785 else
13786 return false;
13787}
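/*
 * Illustrative examples of the UTC-offset heuristic above (boundaries are
 * -2h and +8h):
 *   UTC-05:00 -> tm_gmtoff = -18000 <= -7200 -> treated as a 60Hz zone
 *   UTC+01:00 -> tm_gmtoff =   3600           -> treated as a 50Hz zone
 *   UTC+09:00 -> tm_gmtoff =  32400 >= 28800  -> treated as a 60Hz zone
 */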
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013788
13789/*===========================================================================
13790 * FUNCTION : adjustBlackLevelForCFA
13791 *
13792 * DESCRIPTION: Reorder the black level pattern from RGGB order into the order
13793 * of the sensor's Bayer CFA (Color Filter Array).
13794 *
13795 * PARAMETERS : @input: black level pattern in the order of RGGB
13796 * @output: black level pattern in the order of CFA
13797 * @color_arrangement: CFA color arrangement
13798 *
13799 * RETURN : None
13800 *==========================================================================*/
13801template<typename T>
13802void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13803 T input[BLACK_LEVEL_PATTERN_CNT],
13804 T output[BLACK_LEVEL_PATTERN_CNT],
13805 cam_color_filter_arrangement_t color_arrangement)
13806{
13807 switch (color_arrangement) {
13808 case CAM_FILTER_ARRANGEMENT_GRBG:
13809 output[0] = input[1];
13810 output[1] = input[0];
13811 output[2] = input[3];
13812 output[3] = input[2];
13813 break;
13814 case CAM_FILTER_ARRANGEMENT_GBRG:
13815 output[0] = input[2];
13816 output[1] = input[3];
13817 output[2] = input[0];
13818 output[3] = input[1];
13819 break;
13820 case CAM_FILTER_ARRANGEMENT_BGGR:
13821 output[0] = input[3];
13822 output[1] = input[2];
13823 output[2] = input[1];
13824 output[3] = input[0];
13825 break;
13826 case CAM_FILTER_ARRANGEMENT_RGGB:
13827 output[0] = input[0];
13828 output[1] = input[1];
13829 output[2] = input[2];
13830 output[3] = input[3];
13831 break;
13832 default:
13833 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13834 break;
13835 }
13836}
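/*
 * Illustrative sketch (hypothetical values): an RGGB-ordered pattern
 * {R, Gr, Gb, B} remapped for a GRBG sensor:
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f};
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // cfa == {64.5f, 64.0f, 65.0f, 64.5f}, i.e. Gr, R, B, Gb
 */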
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013837
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013838void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13839 CameraMetadata &resultMetadata,
13840 std::shared_ptr<metadata_buffer_t> settings)
13841{
13842 if (settings == nullptr) {
13843 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13844 return;
13845 }
13846
13847 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13848 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13849 }
13850
13851 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13852 String8 str((const char *)gps_methods);
13853 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13854 }
13855
13856 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13857 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13858 }
13859
13860 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13861 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13862 }
13863
13864 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13865 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13866 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13867 }
13868
13869 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13870 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13871 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13872 }
13873
13874 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13875 int32_t fwk_thumb_size[2];
13876 fwk_thumb_size[0] = thumb_size->width;
13877 fwk_thumb_size[1] = thumb_size->height;
13878 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13879 }
13880
13881 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13882 uint8_t fwk_intent = intent[0];
13883 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13884 }
13885}
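/*
 * Illustrative sketch (hypothetical reader): the JPEG-related tags patched in
 * above can be read back from the resulting CameraMetadata, e.g.:
 *
 *   camera_metadata_entry e = resultMetadata.find(ANDROID_JPEG_ORIENTATION);
 *   if (e.count == 1) {
 *       int32_t orientation = e.data.i32[0];
 *   }
 */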
13886
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013887bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13888 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13889{
13890 if (hdrPlusRequest == nullptr) return false;
13891
13892     // Check that the noise reduction mode is high quality.
13893     if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13894             metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13895                 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013896         ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is missing or not HQ.",
13897                 __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013898 return false;
13899 }
13900
13901     // Check that the edge mode is high quality.
13902 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13903 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13904 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13905 return false;
13906 }
13907
13908 if (request.num_output_buffers != 1 ||
13909 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13910 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013911 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13912             ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13913                     request.output_buffers[i].stream->width,
13914                     request.output_buffers[i].stream->height,
13915                     request.output_buffers[i].stream->format);
13916 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013917 return false;
13918 }
13919
13920 // Get a YUV buffer from pic channel.
13921 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13922 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13923 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13924 if (res != OK) {
13925 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13926 __FUNCTION__, strerror(-res), res);
13927 return false;
13928 }
13929
13930 pbcamera::StreamBuffer buffer;
13931 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080013932 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013933 buffer.data = yuvBuffer->buffer;
13934 buffer.dataSize = yuvBuffer->frame_len;
13935
13936 pbcamera::CaptureRequest pbRequest;
13937 pbRequest.id = request.frame_number;
13938 pbRequest.outputBuffers.push_back(buffer);
13939
13940 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013941 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013942 if (res != OK) {
13943 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13944 strerror(-res), res);
13945 return false;
13946 }
13947
13948 hdrPlusRequest->yuvBuffer = yuvBuffer;
13949 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13950
13951 return true;
13952}
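/*
 * Illustrative sketch (hypothetical call site, member names assumed): a caller
 * would typically try the HDR+ path first and track the pending request, or
 * fall back to the regular capture flow:
 *
 *   HdrPlusPendingRequest hdrPlusRequest;
 *   if (trySubmittingHdrPlusRequest(&hdrPlusRequest, *request, meta)) {
 *       Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
 *       mHdrPlusPendingRequests.emplace(request->frame_number, hdrPlusRequest);
 *   } else {
 *       // continue with the normal (non-HDR+) request path
 *   }
 */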
13953
Chien-Yu Chenee335912017-02-09 17:53:20 -080013954status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13955{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013956 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013957 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13958 return -ENODEV;
13959 }
13960
13961 // Connect to HDR+ service
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013962 status_t res = gHdrPlusClient->connect(this);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013963 if (res != OK) {
13964 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
13965 strerror(-res), res);
13966 return res;
13967 }
13968
13969 // Set static metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013970 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013971 if (res != OK) {
13972         LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
13973 strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013974 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080013975 return res;
13976 }
13977
13978 // Configure stream for HDR+.
13979 res = configureHdrPlusStreamsLocked();
13980 if (res != OK) {
13981 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013982 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080013983 return res;
13984 }
13985
13986 mHdrPlusModeEnabled = true;
13987 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
13988
13989 return OK;
13990}
13991
13992void QCamera3HardwareInterface::disableHdrPlusModeLocked()
13993{
13994 // Disconnect from HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013995 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
13996 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080013997 }
13998
13999 mHdrPlusModeEnabled = false;
14000 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14001}
14002
14003status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014004{
14005 pbcamera::InputConfiguration inputConfig;
14006 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14007 status_t res = OK;
14008
14009 // Configure HDR+ client streams.
14010 // Get input config.
14011 if (mHdrPlusRawSrcChannel) {
14012 // HDR+ input buffers will be provided by HAL.
14013 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14014 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14015 if (res != OK) {
14016             LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14017 __FUNCTION__, strerror(-res), res);
14018 return res;
14019 }
14020
14021 inputConfig.isSensorInput = false;
14022 } else {
14023 // Sensor MIPI will send data to Easel.
14024 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014025 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014026 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14027 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14028 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14029 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14030 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14031 if (mSensorModeInfo.num_raw_bits != 10) {
14032 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14033 mSensorModeInfo.num_raw_bits);
14034 return BAD_VALUE;
14035 }
14036
14037 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014038 }
14039
14040 // Get output configurations.
14041 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014042 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014043
14044 // Easel may need to output YUV output buffers if mPictureChannel was created.
14045 pbcamera::StreamConfiguration yuvOutputConfig;
14046 if (mPictureChannel != nullptr) {
14047 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14048 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14049 if (res != OK) {
14050             LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14051 __FUNCTION__, strerror(-res), res);
14052
14053 return res;
14054 }
14055
14056 outputStreamConfigs.push_back(yuvOutputConfig);
14057 }
14058
14059 // TODO: consider other channels for YUV output buffers.
14060
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014061 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014062 if (res != OK) {
14063         LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14064 strerror(-res), res);
14065 return res;
14066 }
14067
14068 return OK;
14069}
14070
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014071void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14072 const camera_metadata_t &resultMetadata) {
14073 if (result != nullptr) {
14074 if (result->outputBuffers.size() != 1) {
14075             ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14076 result->outputBuffers.size());
14077 return;
14078 }
14079
14080 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14081 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14082 result->outputBuffers[0].streamId);
14083 return;
14084 }
14085
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014086 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014087 HdrPlusPendingRequest pendingRequest;
14088 {
14089 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14090             auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
14091             pendingRequest = req->second;
14092 }
14093
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014094 // Update the result metadata with the settings of the HDR+ still capture request because
14095 // the result metadata belongs to a ZSL buffer.
14096 CameraMetadata metadata;
14097 metadata = &resultMetadata;
14098 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14099 camera_metadata_t* updatedResultMetadata = metadata.release();
14100
14101 QCamera3PicChannel *picChannel =
14102 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14103
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014104 // Check if dumping HDR+ YUV output is enabled.
14105 char prop[PROPERTY_VALUE_MAX];
14106 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14107 bool dumpYuvOutput = atoi(prop);
14108
14109 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014110 // Dump yuv buffer to a ppm file.
14111 pbcamera::StreamConfiguration outputConfig;
14112 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14113 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14114 if (rc == OK) {
14115 char buf[FILENAME_MAX] = {};
14116 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14117 result->requestId, result->outputBuffers[0].streamId,
14118 outputConfig.image.width, outputConfig.image.height);
14119
14120 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14121 } else {
14122 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14123 __FUNCTION__, strerror(-rc), rc);
14124 }
14125 }
14126
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014127 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14128 auto halMetadata = std::make_shared<metadata_buffer_t>();
14129 clear_metadata_buffer(halMetadata.get());
14130
14131 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14132 // encoding.
14133 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14134 halStreamId, /*minFrameDuration*/0);
14135 if (res == OK) {
14136 // Return the buffer to pic channel for encoding.
14137 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14138 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14139 halMetadata);
14140 } else {
14141 // Return the buffer without encoding.
14142 // TODO: This should not happen but we may want to report an error buffer to camera
14143 // service.
14144 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14145 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14146 strerror(-res), res);
14147 }
14148
14149 // Send HDR+ metadata to framework.
14150 {
14151 pthread_mutex_lock(&mMutex);
14152
14153 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14154 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14155 pthread_mutex_unlock(&mMutex);
14156 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014157
14158 // Remove the HDR+ pending request.
14159 {
14160 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14161 auto req = mHdrPlusPendingRequests.find(result->requestId);
14162 mHdrPlusPendingRequests.erase(req);
14163 }
14164 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014165}
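/*
 * Illustrative note: the YUV dump path above can be enabled at runtime with
 *
 *   adb shell setprop persist.camera.hdrplus.dump_yuv 1
 *
 * after which .ppm dumps are written under QCAMERA_DUMP_FRM_LOCATION.
 */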
14166
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014167void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14168 // TODO: Handle HDR+ capture failures and send the failure to framework.
14169 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14170     auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }
14171
14172     // Return the buffer to the pic channel.
14173 QCamera3PicChannel *picChannel =
14174 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14175 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14176
14177 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014178}
14179
Thierry Strudel3d639192016-09-09 11:52:26 -070014180}; //end namespace qcamera