/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0
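// Note (added for readability; an assumption based on camera3 API conventions rather than
// anything guaranteed elsewhere in this file): PARTIAL_RESULT_COUNT is the number of partial
// metadata results the HAL returns per capture (what android.request.partialResultCount
// advertises), and EMPTY_PIPELINE_DELAY is the extra frame latency assumed when a request
// enters an otherwise empty pipeline.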

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
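// Rough relationship (assumption, noted here only for readability): in HFR recording the HAL
// batches roughly (video fps / PREVIEW_FPS_FOR_HFR) buffers per request, e.g. 120 / 30 = 4
// or 240 / 30 = 8, with the batch size capped at MAX_HFR_BATCH_SIZE.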
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6
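// These indices address a flattened array of (x, y) coordinates in the order used by
// ANDROID_STATISTICS_FACE_LANDMARKS: left eye, right eye, mouth center, i.e.
// int32_t landmarks[TOTAL_LANDMARK_INDICES] = { le_x, le_y, re_x, re_y, mouth_x, mouth_y }.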

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// Whether Easel is in bypass-only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
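// Flattened (width, height) pairs; the leading {0, 0} entry is the "no thumbnail" option
// that the ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES specification requires to be listed.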

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every option has a mapping, so some Android enum values are not listed.
 * The order of this list also matters: when mapping from HAL to Android values, the table is
 * traversed from lower to higher index, so for HAL values that map to more than one Android
 * value, the traversal logic selects the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
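
// Illustrative sketch only (not part of this HAL): the QCameraMap tables above are plain
// arrays of {framework enum, HAL enum} pairs meant to be scanned linearly, first match wins,
// which is why entry order matters. A hypothetical lookup helper (member names assumed from
// typical usage of QCameraMap in this HAL) would look like:
//
//     template <typename fwkType, typename halType, size_t N>
//     bool lookupHalValue(const QCamera3HardwareInterface::QCameraMap<fwkType, halType> (&map)[N],
//             fwkType fwk, halType &hal) {
//         for (size_t i = 0; i < N; i++) {
//             if (map[i].fwk_name == fwk) { hal = map[i].hal_name; return true; }
//         }
//         return false;
//     }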

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
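
// register_stream_buffers and get_metadata_vendor_tag_ops are intentionally NULL: both entry
// points are deprecated for camera3 device versions >= 3.2, and vendor tags are exposed
// through the module-level vendor tag ops instead.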

// Initialise to a default (invalid) value; the real per-camera session ids are filled in
// by openCamera() and reset in closeCamera().
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (!mFirstConfiguration) {
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    mIsApInputUsedForHdrPlus =
            property_get_bool("persist.camera.hdrplus.apinput", false);
    ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
            mIsApInputUsedForHdrPlus ? "AP" : "Easel");

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if (rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if (rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configuration matches what is advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes differ for each type of stream format; check against the
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec, the depth point cloud width should be sample count / 16
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
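                // Example (hypothetical sensor): for a 4032x3024 active array, the expected
                // depth point-cloud blob stream would be 4032 * 3024 / 16 = 762048 wide and 1 high.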
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will always
                 * follow that; we also have logic to override to full array
                 * size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic function to determine if the support channel is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams */
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false)
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on the current stream configuration
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1406
1407/*==============================================================================
1408 * FUNCTION : updateFpsInPreviewBuffer
1409 *
1410 * DESCRIPTION: update FPS information in preview buffer.
1411 *
1412 * PARAMETERS :
1413 * @metadata : pointer to metadata buffer
1414 * @frame_number: frame_number to look for in pending buffer list
1415 *
1416 * RETURN : None
1417 *
1418 *==========================================================================*/
1419void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1420 uint32_t frame_number)
1421{
1422 // Mark all pending buffers for this particular request
1423 // with corresponding framerate information
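// The max FPS from CAM_INTF_PARM_FPS_RANGE is written into each matching
// preview buffer's private handle (UPDATE_REFRESH_RATE) so that the display
// pipeline can presumably align its refresh rate with the camera output.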
1424 for (List<PendingBuffersInRequest>::iterator req =
1425 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1426 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1427 for(List<PendingBufferInfo>::iterator j =
1428 req->mPendingBufferList.begin();
1429 j != req->mPendingBufferList.end(); j++) {
1430 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1431 if ((req->frame_number == frame_number) &&
1432 (channel->getStreamTypeMask() &
1433 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1434 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1435 CAM_INTF_PARM_FPS_RANGE, metadata) {
1436 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1437 struct private_handle_t *priv_handle =
1438 (struct private_handle_t *)(*(j->buffer));
1439 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1440 }
1441 }
1442 }
1443 }
1444}
1445
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001446/*==============================================================================
1447 * FUNCTION : updateTimeStampInPendingBuffers
1448 *
1449 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1450 * of a frame number
1451 *
1452 * PARAMETERS :
1453 * @frameNumber : frame number; the timestamp will be set on all pending buffers of this frame number
1454 * @timestamp : timestamp to be set
1455 *
1456 * RETURN : None
1457 *
1458 *==========================================================================*/
1459void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1460 uint32_t frameNumber, nsecs_t timestamp)
1461{
1462 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1463 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1464 if (req->frame_number != frameNumber)
1465 continue;
1466
1467 for (auto k = req->mPendingBufferList.begin();
1468 k != req->mPendingBufferList.end(); k++ ) {
1469 struct private_handle_t *priv_handle =
1470 (struct private_handle_t *) (*(k->buffer));
1471 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1472 }
1473 }
1474 return;
1475}
1476
Thierry Strudel3d639192016-09-09 11:52:26 -07001477/*===========================================================================
1478 * FUNCTION : configureStreams
1479 *
1480 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1481 * and output streams.
1482 *
1483 * PARAMETERS :
1484 * @stream_list : streams to be configured
1485 *
1486 * RETURN :
1487 *
1488 *==========================================================================*/
1489int QCamera3HardwareInterface::configureStreams(
1490 camera3_stream_configuration_t *streamList)
1491{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001492 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 int rc = 0;
1494
1495 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001496 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001498 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001499
1500 return rc;
1501}
1502
1503/*===========================================================================
1504 * FUNCTION : configureStreamsPerfLocked
1505 *
1506 * DESCRIPTION: configureStreams while perfLock is held.
1507 *
1508 * PARAMETERS :
1509 * @stream_list : streams to be configured
1510 *
1511 * RETURN : int32_t type of status
1512 * NO_ERROR -- success
1513 * non-zero failure code
1514 *==========================================================================*/
1515int QCamera3HardwareInterface::configureStreamsPerfLocked(
1516 camera3_stream_configuration_t *streamList)
1517{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001518 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001519 int rc = 0;
1520
1521 // Sanity check stream_list
1522 if (streamList == NULL) {
1523 LOGE("NULL stream configuration");
1524 return BAD_VALUE;
1525 }
1526 if (streamList->streams == NULL) {
1527 LOGE("NULL stream list");
1528 return BAD_VALUE;
1529 }
1530
1531 if (streamList->num_streams < 1) {
1532 LOGE("Bad number of streams requested: %d",
1533 streamList->num_streams);
1534 return BAD_VALUE;
1535 }
1536
1537 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1538 LOGE("Maximum number of streams %d exceeded: %d",
1539 MAX_NUM_STREAMS, streamList->num_streams);
1540 return BAD_VALUE;
1541 }
1542
1543 mOpMode = streamList->operation_mode;
1544 LOGD("mOpMode: %d", mOpMode);
1545
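// Tear down the previous session first: stop every existing channel
// (processing, raw dump, HDR+ raw source, support, analysis, metadata) and
// the bundled channel handle before validating the new stream list.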
1546 /* First invalidate all the streams in mStreamInfo;
1547 * if they appear again in the new list, they will be re-validated */
1548 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1549 it != mStreamInfo.end(); it++) {
1550 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1551 if (channel) {
1552 channel->stop();
1553 }
1554 (*it)->status = INVALID;
1555 }
1556
1557 if (mRawDumpChannel) {
1558 mRawDumpChannel->stop();
1559 delete mRawDumpChannel;
1560 mRawDumpChannel = NULL;
1561 }
1562
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001563 if (mHdrPlusRawSrcChannel) {
1564 mHdrPlusRawSrcChannel->stop();
1565 delete mHdrPlusRawSrcChannel;
1566 mHdrPlusRawSrcChannel = NULL;
1567 }
1568
Thierry Strudel3d639192016-09-09 11:52:26 -07001569 if (mSupportChannel)
1570 mSupportChannel->stop();
1571
1572 if (mAnalysisChannel) {
1573 mAnalysisChannel->stop();
1574 }
1575 if (mMetadataChannel) {
1576 /* If mStreamInfo is not empty, a metadata stream already exists */
1577 mMetadataChannel->stop();
1578 }
1579 if (mChannelHandle) {
1580 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1581 mChannelHandle);
1582 LOGD("stopping channel %d", mChannelHandle);
1583 }
1584
1585 pthread_mutex_lock(&mMutex);
1586
1587 // Check state
1588 switch (mState) {
1589 case INITIALIZED:
1590 case CONFIGURED:
1591 case STARTED:
1592 /* valid state */
1593 break;
1594 default:
1595 LOGE("Invalid state %d", mState);
1596 pthread_mutex_unlock(&mMutex);
1597 return -ENODEV;
1598 }
1599
1600 /* Check whether we have video stream */
1601 m_bIs4KVideo = false;
1602 m_bIsVideo = false;
1603 m_bEisSupportedSize = false;
1604 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001605 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001606 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001607 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 uint32_t videoWidth = 0U;
1609 uint32_t videoHeight = 0U;
1610 size_t rawStreamCnt = 0;
1611 size_t stallStreamCnt = 0;
1612 size_t processedStreamCnt = 0;
1613 // Number of streams on ISP encoder path
1614 size_t numStreamsOnEncoder = 0;
1615 size_t numYuv888OnEncoder = 0;
1616 bool bYuv888OverrideJpeg = false;
1617 cam_dimension_t largeYuv888Size = {0, 0};
1618 cam_dimension_t maxViewfinderSize = {0, 0};
1619 bool bJpegExceeds4K = false;
1620 bool bJpegOnEncoder = false;
1621 bool bUseCommonFeatureMask = false;
1622 cam_feature_mask_t commonFeatureMask = 0;
1623 bool bSmallJpegSize = false;
1624 uint32_t width_ratio;
1625 uint32_t height_ratio;
1626 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1627 camera3_stream_t *inputStream = NULL;
1628 bool isJpeg = false;
1629 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001630 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001631
1632 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1633
1634 /* EIS configuration */
Thierry Strudel3d639192016-09-09 11:52:26 -07001635 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001636 uint8_t eis_prop_set;
1637 uint32_t maxEisWidth = 0;
1638 uint32_t maxEisHeight = 0;
1639
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001640 // Initialize all instant AEC related variables
1641 mInstantAEC = false;
1642 mResetInstantAEC = false;
1643 mInstantAECSettledFrameNumber = 0;
1644 mAecSkipDisplayFrameBound = 0;
1645 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001646 mCurrFeatureState = 0;
1647 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001648
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1650
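// Scan the sensor capabilities for EIS (2.0/3.0) and OIS support; EIS is
// only turned on below when the eis.enable property is set, the sensor has
// no OIS, and the session is not a constrained high-speed configuration.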
1651 size_t count = IS_TYPE_MAX;
1652 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1653 for (size_t i = 0; i < count; i++) {
1654 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001655 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1656 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001657 break;
1658 }
1659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 count = CAM_OPT_STAB_MAX;
1661 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1662 for (size_t i = 0; i < count; i++) {
1663 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1664 oisSupported = true;
1665 break;
1666 }
1667 }
1668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001670 maxEisWidth = MAX_EIS_WIDTH;
1671 maxEisHeight = MAX_EIS_HEIGHT;
1672 }
1673
1674 /* EIS setprop control */
1675 char eis_prop[PROPERTY_VALUE_MAX];
1676 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001677 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001678 eis_prop_set = (uint8_t)atoi(eis_prop);
1679
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001680 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001681 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1682
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001683 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1684 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1685
Thierry Strudel3d639192016-09-09 11:52:26 -07001686 /* stream configurations */
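// First pass over the requested streams: classify each one (ZSL/input,
// JPEG, video, RAW, YUV callback), track the largest video and YUV sizes,
// and count the streams routed to the ISP encoder path. These counters
// drive the sanity checks and feature-mask decisions further below.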
1687 for (size_t i = 0; i < streamList->num_streams; i++) {
1688 camera3_stream_t *newStream = streamList->streams[i];
1689 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1690 "height = %d, rotation = %d, usage = 0x%x",
1691 i, newStream->stream_type, newStream->format,
1692 newStream->width, newStream->height, newStream->rotation,
1693 newStream->usage);
1694 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1695 newStream->stream_type == CAMERA3_STREAM_INPUT){
1696 isZsl = true;
1697 }
1698 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1699 inputStream = newStream;
1700 }
1701
Emilian Peev7650c122017-01-19 08:24:33 -08001702 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1703 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001704 isJpeg = true;
1705 jpegSize.width = newStream->width;
1706 jpegSize.height = newStream->height;
1707 if (newStream->width > VIDEO_4K_WIDTH ||
1708 newStream->height > VIDEO_4K_HEIGHT)
1709 bJpegExceeds4K = true;
1710 }
1711
1712 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1713 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1714 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001715 // In HAL3 we can have multiple different video streams.
1716 // videoWidth and videoHeight below track the dimensions of the
1717 // largest of them.
1718 if (videoWidth < newStream->width ||
1719 videoHeight < newStream->height) {
1720 videoWidth = newStream->width;
1721 videoHeight = newStream->height;
1722 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001723 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1724 (VIDEO_4K_HEIGHT <= newStream->height)) {
1725 m_bIs4KVideo = true;
1726 }
1727 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1728 (newStream->height <= maxEisHeight);
1729 }
1730 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1731 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1732 switch (newStream->format) {
1733 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001734 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1735 depthPresent = true;
1736 break;
1737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001738 stallStreamCnt++;
1739 if (isOnEncoder(maxViewfinderSize, newStream->width,
1740 newStream->height)) {
1741 numStreamsOnEncoder++;
1742 bJpegOnEncoder = true;
1743 }
1744 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1745 newStream->width);
1746 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1747 newStream->height);
1748 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1749 "FATAL: max_downscale_factor cannot be zero and so assert");
1750 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1751 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1752 LOGH("Setting small jpeg size flag to true");
1753 bSmallJpegSize = true;
1754 }
1755 break;
1756 case HAL_PIXEL_FORMAT_RAW10:
1757 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1758 case HAL_PIXEL_FORMAT_RAW16:
1759 rawStreamCnt++;
1760 break;
1761 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1762 processedStreamCnt++;
1763 if (isOnEncoder(maxViewfinderSize, newStream->width,
1764 newStream->height)) {
1765 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1766 !IS_USAGE_ZSL(newStream->usage)) {
1767 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1768 }
1769 numStreamsOnEncoder++;
1770 }
1771 break;
1772 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1773 processedStreamCnt++;
1774 if (isOnEncoder(maxViewfinderSize, newStream->width,
1775 newStream->height)) {
1776 // If Yuv888 size is not greater than 4K, set feature mask
1777 // to SUPERSET so that it support concurrent request on
1778 // YUV and JPEG.
1779 if (newStream->width <= VIDEO_4K_WIDTH &&
1780 newStream->height <= VIDEO_4K_HEIGHT) {
1781 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1782 }
1783 numStreamsOnEncoder++;
1784 numYuv888OnEncoder++;
1785 largeYuv888Size.width = newStream->width;
1786 largeYuv888Size.height = newStream->height;
1787 }
1788 break;
1789 default:
1790 processedStreamCnt++;
1791 if (isOnEncoder(maxViewfinderSize, newStream->width,
1792 newStream->height)) {
1793 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1794 numStreamsOnEncoder++;
1795 }
1796 break;
1797 }
1798
1799 }
1800 }
1801
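// EIS applies only to rear-camera video use cases: disable it for front
// (and front-aux) sensors or when no video stream was configured.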
1802 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1803 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1804 !m_bIsVideo) {
1805 m_bEisEnable = false;
1806 }
1807
Thierry Strudel54dc9782017-02-15 12:12:10 -08001808 uint8_t forceEnableTnr = 0;
1809 char tnr_prop[PROPERTY_VALUE_MAX];
1810 memset(tnr_prop, 0, sizeof(tnr_prop));
1811 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1812 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1813
Thierry Strudel3d639192016-09-09 11:52:26 -07001814 /* Logic to enable/disable TNR based on specific config size/etc.*/
1815 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1816 ((videoWidth == 1920 && videoHeight == 1080) ||
1817 (videoWidth == 1280 && videoHeight == 720)) &&
1818 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1819 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001820 else if (forceEnableTnr)
1821 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001822
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001823 char videoHdrProp[PROPERTY_VALUE_MAX];
1824 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1825 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1826 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1827
1828 if (hdr_mode_prop == 1 && m_bIsVideo &&
1829 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1830 m_bVideoHdrEnabled = true;
1831 else
1832 m_bVideoHdrEnabled = false;
1833
1834
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 /* Check if num_streams is sane */
1836 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1837 rawStreamCnt > MAX_RAW_STREAMS ||
1838 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1839 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1840 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1841 pthread_mutex_unlock(&mMutex);
1842 return -EINVAL;
1843 }
1844 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001845 if (isZsl && m_bIs4KVideo) {
1846 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 pthread_mutex_unlock(&mMutex);
1848 return -EINVAL;
1849 }
1850 /* Check if stream sizes are sane */
1851 if (numStreamsOnEncoder > 2) {
1852 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1853 pthread_mutex_unlock(&mMutex);
1854 return -EINVAL;
1855 } else if (1 < numStreamsOnEncoder){
1856 bUseCommonFeatureMask = true;
1857 LOGH("Multiple streams above max viewfinder size, common mask needed");
1858 }
1859
1860 /* Check if BLOB size is greater than 4k in 4k recording case */
1861 if (m_bIs4KVideo && bJpegExceeds4K) {
1862 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1863 pthread_mutex_unlock(&mMutex);
1864 return -EINVAL;
1865 }
1866
Emilian Peev7650c122017-01-19 08:24:33 -08001867 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1868 depthPresent) {
1869 LOGE("HAL doesn't support depth streams in HFR mode!");
1870 pthread_mutex_unlock(&mMutex);
1871 return -EINVAL;
1872 }
1873
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1875 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1876 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1877 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1878 // configurations:
1879 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1880 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1881 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1882 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1883 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1884 __func__);
1885 pthread_mutex_unlock(&mMutex);
1886 return -EINVAL;
1887 }
1888
1889 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1890 // the YUV stream's size is greater or equal to the JPEG size, set common
1891 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1892 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1893 jpegSize.width, jpegSize.height) &&
1894 largeYuv888Size.width > jpegSize.width &&
1895 largeYuv888Size.height > jpegSize.height) {
1896 bYuv888OverrideJpeg = true;
1897 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1898 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900
1901 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1902 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1903 commonFeatureMask);
1904 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1905 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1906
1907 rc = validateStreamDimensions(streamList);
1908 if (rc == NO_ERROR) {
1909 rc = validateStreamRotations(streamList);
1910 }
1911 if (rc != NO_ERROR) {
1912 LOGE("Invalid stream configuration requested!");
1913 pthread_mutex_unlock(&mMutex);
1914 return rc;
1915 }
1916
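// Second pass: re-validate streams that survived from the previous
// configuration, register brand-new ones in mStreamInfo, and pick the
// ZSL/reprocess stream whose dimensions override shared h/w streams.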
1917 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1918 for (size_t i = 0; i < streamList->num_streams; i++) {
1919 camera3_stream_t *newStream = streamList->streams[i];
1920 LOGH("newStream type = %d, stream format = %d "
1921 "stream size : %d x %d, stream rotation = %d",
1922 newStream->stream_type, newStream->format,
1923 newStream->width, newStream->height, newStream->rotation);
1924 // if the stream is already in mStreamInfo, re-validate it
1925 bool stream_exists = false;
1926 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1927 it != mStreamInfo.end(); it++) {
1928 if ((*it)->stream == newStream) {
1929 QCamera3ProcessingChannel *channel =
1930 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1931 stream_exists = true;
1932 if (channel)
1933 delete channel;
1934 (*it)->status = VALID;
1935 (*it)->stream->priv = NULL;
1936 (*it)->channel = NULL;
1937 }
1938 }
1939 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1940 //new stream
1941 stream_info_t* stream_info;
1942 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1943 if (!stream_info) {
1944 LOGE("Could not allocate stream info");
1945 rc = -ENOMEM;
1946 pthread_mutex_unlock(&mMutex);
1947 return rc;
1948 }
1949 stream_info->stream = newStream;
1950 stream_info->status = VALID;
1951 stream_info->channel = NULL;
1952 mStreamInfo.push_back(stream_info);
1953 }
1954 /* Covers Opaque ZSL and API1 F/W ZSL */
1955 if (IS_USAGE_ZSL(newStream->usage)
1956 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1957 if (zslStream != NULL) {
1958 LOGE("Multiple input/reprocess streams requested!");
1959 pthread_mutex_unlock(&mMutex);
1960 return BAD_VALUE;
1961 }
1962 zslStream = newStream;
1963 }
1964 /* Covers YUV reprocess */
1965 if (inputStream != NULL) {
1966 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1967 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1968 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1969 && inputStream->width == newStream->width
1970 && inputStream->height == newStream->height) {
1971 if (zslStream != NULL) {
1972 /* This scenario indicates multiple YUV streams with the same size
1973 * as the input stream have been requested. Since the zsl stream
1974 * handle is used solely to override the size of streams that share
1975 * h/w streams, we just make a guess here as to which of the streams
1976 * is the ZSL stream; this will be refactored once generic logic for
1977 * streams sharing encoder output is in place.
1978 */
1979 LOGH("Warning, Multiple ip/reprocess streams requested!");
1980 }
1981 zslStream = newStream;
1982 }
1983 }
1984 }
1985
1986 /* If a zsl stream is set, we know that we have configured at least one input or
1987 bidirectional stream */
1988 if (NULL != zslStream) {
1989 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1990 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1991 mInputStreamInfo.format = zslStream->format;
1992 mInputStreamInfo.usage = zslStream->usage;
1993 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1994 mInputStreamInfo.dim.width,
1995 mInputStreamInfo.dim.height,
1996 mInputStreamInfo.format, mInputStreamInfo.usage);
1997 }
1998
1999 cleanAndSortStreamInfo();
2000 if (mMetadataChannel) {
2001 delete mMetadataChannel;
2002 mMetadataChannel = NULL;
2003 }
2004 if (mSupportChannel) {
2005 delete mSupportChannel;
2006 mSupportChannel = NULL;
2007 }
2008
2009 if (mAnalysisChannel) {
2010 delete mAnalysisChannel;
2011 mAnalysisChannel = NULL;
2012 }
2013
2014 if (mDummyBatchChannel) {
2015 delete mDummyBatchChannel;
2016 mDummyBatchChannel = NULL;
2017 }
2018
Emilian Peev7650c122017-01-19 08:24:33 -08002019 if (mDepthChannel) {
2020 mDepthChannel = NULL;
2021 }
2022
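// persist.camera.is_type selects the image-stabilization type; the default
// of "4" appears to correspond to IS_TYPE_EIS_3_0, so the EIS 3.0 video
// buffer count and PPEISCORE feature are used further below when it matches.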
Thierry Strudel2896d122017-02-23 19:18:03 -08002023 char is_type_value[PROPERTY_VALUE_MAX];
2024 property_get("persist.camera.is_type", is_type_value, "4");
2025 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2026
Thierry Strudel3d639192016-09-09 11:52:26 -07002027 //Create metadata channel and initialize it
2028 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2029 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2030 gCamCapability[mCameraId]->color_arrangement);
2031 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2032 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002033 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002034 if (mMetadataChannel == NULL) {
2035 LOGE("failed to allocate metadata channel");
2036 rc = -ENOMEM;
2037 pthread_mutex_unlock(&mMutex);
2038 return rc;
2039 }
2040 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2041 if (rc < 0) {
2042 LOGE("metadata channel initialization failed");
2043 delete mMetadataChannel;
2044 mMetadataChannel = NULL;
2045 pthread_mutex_unlock(&mMutex);
2046 return rc;
2047 }
2048
Thierry Strudel2896d122017-02-23 19:18:03 -08002049 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002050 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002051 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002052 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2053 /* Allocate channel objects for the requested streams */
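// For each stream this loop picks a CAM_STREAM_TYPE_*, builds its
// postprocess feature mask, fixes up gralloc usage flags, and constructs
// the matching channel (regular/YUV/RAW/picture/depth) when none exists.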
2054 for (size_t i = 0; i < streamList->num_streams; i++) {
2055 camera3_stream_t *newStream = streamList->streams[i];
2056 uint32_t stream_usage = newStream->usage;
2057 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2058 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2059 struct camera_info *p_info = NULL;
2060 pthread_mutex_lock(&gCamLock);
2061 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2062 pthread_mutex_unlock(&gCamLock);
2063 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2064 || IS_USAGE_ZSL(newStream->usage)) &&
2065 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002066 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002068 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2069 if (bUseCommonFeatureMask)
2070 zsl_ppmask = commonFeatureMask;
2071 else
2072 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002073 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002074 if (numStreamsOnEncoder > 0)
2075 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2076 else
2077 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002079 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002081 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002082 LOGH("Input stream configured, reprocess config");
2083 } else {
2084 //for non zsl streams find out the format
2085 switch (newStream->format) {
2086 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2087 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002088 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2090 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2091 /* add additional features to pp feature mask */
2092 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2093 mStreamConfigInfo.num_streams);
2094
2095 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2096 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2097 CAM_STREAM_TYPE_VIDEO;
2098 if (m_bTnrEnabled && m_bTnrVideo) {
2099 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2100 CAM_QCOM_FEATURE_CPP_TNR;
2101 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2102 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2103 ~CAM_QCOM_FEATURE_CDS;
2104 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002105 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2106 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2107 CAM_QTI_FEATURE_PPEISCORE;
2108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002109 } else {
2110 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2111 CAM_STREAM_TYPE_PREVIEW;
2112 if (m_bTnrEnabled && m_bTnrPreview) {
2113 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2114 CAM_QCOM_FEATURE_CPP_TNR;
2115 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2116 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2117 ~CAM_QCOM_FEATURE_CDS;
2118 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002119 if(!m_bSwTnrPreview) {
2120 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2121 ~CAM_QTI_FEATURE_SW_TNR;
2122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 padding_info.width_padding = mSurfaceStridePadding;
2124 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002125 previewSize.width = (int32_t)newStream->width;
2126 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002127 }
2128 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2129 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2130 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2131 newStream->height;
2132 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2133 newStream->width;
2134 }
2135 }
2136 break;
2137 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002138 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002139 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2140 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2141 if (bUseCommonFeatureMask)
2142 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2143 commonFeatureMask;
2144 else
2145 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2146 CAM_QCOM_FEATURE_NONE;
2147 } else {
2148 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2149 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2150 }
2151 break;
2152 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002153 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002154 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2155 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2156 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2157 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2158 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002159 /* Remove rotation if it is not supported
2160 for 4K LiveVideo snapshot case (online processing) */
2161 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2162 CAM_QCOM_FEATURE_ROTATION)) {
2163 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2164 &= ~CAM_QCOM_FEATURE_ROTATION;
2165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 } else {
2167 if (bUseCommonFeatureMask &&
2168 isOnEncoder(maxViewfinderSize, newStream->width,
2169 newStream->height)) {
2170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2171 } else {
2172 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2173 }
2174 }
2175 if (isZsl) {
2176 if (zslStream) {
2177 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2178 (int32_t)zslStream->width;
2179 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2180 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002181 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2182 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 } else {
2184 LOGE("Error, No ZSL stream identified");
2185 pthread_mutex_unlock(&mMutex);
2186 return -EINVAL;
2187 }
2188 } else if (m_bIs4KVideo) {
2189 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2190 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2191 } else if (bYuv888OverrideJpeg) {
2192 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2193 (int32_t)largeYuv888Size.width;
2194 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2195 (int32_t)largeYuv888Size.height;
2196 }
2197 break;
2198 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2199 case HAL_PIXEL_FORMAT_RAW16:
2200 case HAL_PIXEL_FORMAT_RAW10:
2201 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2202 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2203 isRawStreamRequested = true;
2204 break;
2205 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002206 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002207 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2208 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2209 break;
2210 }
2211 }
2212
2213 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2214 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2215 gCamCapability[mCameraId]->color_arrangement);
2216
2217 if (newStream->priv == NULL) {
2218 //New stream, construct channel
2219 switch (newStream->stream_type) {
2220 case CAMERA3_STREAM_INPUT:
2221 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2222 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2223 break;
2224 case CAMERA3_STREAM_BIDIRECTIONAL:
2225 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2226 GRALLOC_USAGE_HW_CAMERA_WRITE;
2227 break;
2228 case CAMERA3_STREAM_OUTPUT:
2229 /* For video encoding streams, set the read/write rarely
2230 * flags so that the buffers may be allocated un-cached */
2231 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2232 newStream->usage |=
2233 (GRALLOC_USAGE_SW_READ_RARELY |
2234 GRALLOC_USAGE_SW_WRITE_RARELY |
2235 GRALLOC_USAGE_HW_CAMERA_WRITE);
2236 else if (IS_USAGE_ZSL(newStream->usage))
2237 {
2238 LOGD("ZSL usage flag skipping");
2239 }
2240 else if (newStream == zslStream
2241 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2242 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2243 } else
2244 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2245 break;
2246 default:
2247 LOGE("Invalid stream_type %d", newStream->stream_type);
2248 break;
2249 }
2250
2251 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2252 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2253 QCamera3ProcessingChannel *channel = NULL;
2254 switch (newStream->format) {
2255 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2256 if ((newStream->usage &
2257 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2258 (streamList->operation_mode ==
2259 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2260 ) {
2261 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2262 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002263 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002264 this,
2265 newStream,
2266 (cam_stream_type_t)
2267 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2268 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2269 mMetadataChannel,
2270 0); //heap buffers are not required for HFR video channel
2271 if (channel == NULL) {
2272 LOGE("allocation of channel failed");
2273 pthread_mutex_unlock(&mMutex);
2274 return -ENOMEM;
2275 }
2276 //channel->getNumBuffers() will return 0 here so use
2277 //MAX_INFLIGHT_HFR_REQUESTS
2278 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2279 newStream->priv = channel;
2280 LOGI("num video buffers in HFR mode: %d",
2281 MAX_INFLIGHT_HFR_REQUESTS);
2282 } else {
2283 /* Copy stream contents in HFR preview only case to create
2284 * dummy batch channel so that sensor streaming is in
2285 * HFR mode */
2286 if (!m_bIsVideo && (streamList->operation_mode ==
2287 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2288 mDummyBatchStream = *newStream;
2289 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002290 int bufferCount = MAX_INFLIGHT_REQUESTS;
2291 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2292 CAM_STREAM_TYPE_VIDEO) {
2293 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2294 bufferCount = MAX_VIDEO_BUFFERS;
2295 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2297 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002298 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002299 this,
2300 newStream,
2301 (cam_stream_type_t)
2302 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2304 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002305 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002306 if (channel == NULL) {
2307 LOGE("allocation of channel failed");
2308 pthread_mutex_unlock(&mMutex);
2309 return -ENOMEM;
2310 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002311 /* disable UBWC for preview, though supported,
2312 * to take advantage of CPP duplication */
2313 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2314 (previewSize.width == (int32_t)videoWidth) &&
2315 (previewSize.height == (int32_t)videoHeight)) {
2316 channel->setUBWCEnabled(false);
2317 } else {
2318 channel->setUBWCEnabled(true);
2319 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 newStream->max_buffers = channel->getNumBuffers();
2321 newStream->priv = channel;
2322 }
2323 break;
2324 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2325 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2326 mChannelHandle,
2327 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002328 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 this,
2330 newStream,
2331 (cam_stream_type_t)
2332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2333 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2334 mMetadataChannel);
2335 if (channel == NULL) {
2336 LOGE("allocation of YUV channel failed");
2337 pthread_mutex_unlock(&mMutex);
2338 return -ENOMEM;
2339 }
2340 newStream->max_buffers = channel->getNumBuffers();
2341 newStream->priv = channel;
2342 break;
2343 }
2344 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2345 case HAL_PIXEL_FORMAT_RAW16:
2346 case HAL_PIXEL_FORMAT_RAW10:
2347 mRawChannel = new QCamera3RawChannel(
2348 mCameraHandle->camera_handle, mChannelHandle,
2349 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002350 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 this, newStream,
2352 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2353 mMetadataChannel,
2354 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2355 if (mRawChannel == NULL) {
2356 LOGE("allocation of raw channel failed");
2357 pthread_mutex_unlock(&mMutex);
2358 return -ENOMEM;
2359 }
2360 newStream->max_buffers = mRawChannel->getNumBuffers();
2361 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2362 break;
2363 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002364 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2365 mDepthChannel = new QCamera3DepthChannel(
2366 mCameraHandle->camera_handle, mChannelHandle,
2367 mCameraHandle->ops, NULL, NULL, &padding_info,
2368 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2369 mMetadataChannel);
2370 if (NULL == mDepthChannel) {
2371 LOGE("Allocation of depth channel failed");
2372 pthread_mutex_unlock(&mMutex);
2373 return NO_MEMORY;
2374 }
2375 newStream->priv = mDepthChannel;
2376 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2377 } else {
2378 // Max live snapshot inflight buffer is 1. This is to mitigate
2379 // frame drop issues for video snapshot. The more buffers being
2380 // allocated, the more frame drops there are.
2381 mPictureChannel = new QCamera3PicChannel(
2382 mCameraHandle->camera_handle, mChannelHandle,
2383 mCameraHandle->ops, captureResultCb,
2384 setBufferErrorStatus, &padding_info, this, newStream,
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2386 m_bIs4KVideo, isZsl, mMetadataChannel,
2387 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2388 if (mPictureChannel == NULL) {
2389 LOGE("allocation of channel failed");
2390 pthread_mutex_unlock(&mMutex);
2391 return -ENOMEM;
2392 }
2393 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2394 newStream->max_buffers = mPictureChannel->getNumBuffers();
2395 mPictureChannel->overrideYuvSize(
2396 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 break;
2400
2401 default:
2402 LOGE("not a supported format 0x%x", newStream->format);
2403 break;
2404 }
2405 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2406 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2407 } else {
2408 LOGE("Error, Unknown stream type");
2409 pthread_mutex_unlock(&mMutex);
2410 return -EINVAL;
2411 }
2412
2413 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2414 if (channel != NULL && channel->isUBWCEnabled()) {
2415 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002416 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2417 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2419 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2420 }
2421 }
2422
2423 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2424 it != mStreamInfo.end(); it++) {
2425 if ((*it)->stream == newStream) {
2426 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2427 break;
2428 }
2429 }
2430 } else {
2431 // Channel already exists for this stream
2432 // Do nothing for now
2433 }
2434 padding_info = gCamCapability[mCameraId]->padding_info;
2435
Emilian Peev7650c122017-01-19 08:24:33 -08002436 /* Do not add entries for input & depth streams in meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 * since there is no real stream associated with them
2438 */
Emilian Peev7650c122017-01-19 08:24:33 -08002439 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2440 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002442 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 }
2444
Thierry Strudel2896d122017-02-23 19:18:03 -08002445 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2446 onlyRaw = false;
2447 }
2448
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002449 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002450 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002451 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002452 cam_analysis_info_t analysisInfo;
2453 int32_t ret = NO_ERROR;
2454 ret = mCommon.getAnalysisInfo(
2455 FALSE,
2456 analysisFeatureMask,
2457 &analysisInfo);
2458 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002459 cam_color_filter_arrangement_t analysis_color_arrangement =
2460 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2461 CAM_FILTER_ARRANGEMENT_Y :
2462 gCamCapability[mCameraId]->color_arrangement);
2463 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2464 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002465 cam_dimension_t analysisDim;
2466 analysisDim = mCommon.getMatchingDimension(previewSize,
2467 analysisInfo.analysis_recommended_res);
2468
2469 mAnalysisChannel = new QCamera3SupportChannel(
2470 mCameraHandle->camera_handle,
2471 mChannelHandle,
2472 mCameraHandle->ops,
2473 &analysisInfo.analysis_padding_info,
2474 analysisFeatureMask,
2475 CAM_STREAM_TYPE_ANALYSIS,
2476 &analysisDim,
2477 (analysisInfo.analysis_format
2478 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2479 : CAM_FORMAT_YUV_420_NV21),
2480 analysisInfo.hw_analysis_supported,
2481 gCamCapability[mCameraId]->color_arrangement,
2482 this,
2483 0); // force buffer count to 0
2484 } else {
2485 LOGW("getAnalysisInfo failed, ret = %d", ret);
2486 }
2487 if (!mAnalysisChannel) {
2488 LOGW("Analysis channel cannot be created");
2489 }
2490 }
2491
Thierry Strudel3d639192016-09-09 11:52:26 -07002492 //RAW DUMP channel
2493 if (mEnableRawDump && isRawStreamRequested == false){
2494 cam_dimension_t rawDumpSize;
2495 rawDumpSize = getMaxRawSize(mCameraId);
2496 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2497 setPAAFSupport(rawDumpFeatureMask,
2498 CAM_STREAM_TYPE_RAW,
2499 gCamCapability[mCameraId]->color_arrangement);
2500 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2501 mChannelHandle,
2502 mCameraHandle->ops,
2503 rawDumpSize,
2504 &padding_info,
2505 this, rawDumpFeatureMask);
2506 if (!mRawDumpChannel) {
2507 LOGE("Raw Dump channel cannot be created");
2508 pthread_mutex_unlock(&mMutex);
2509 return -ENOMEM;
2510 }
2511 }
2512
Chien-Yu Chenee335912017-02-09 17:53:20 -08002513 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002514 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002515 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002516 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2517 "HDR+ RAW source channel is not created.",
2518 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002519 } else {
2520 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2521 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2522 setPAAFSupport(hdrPlusRawFeatureMask,
2523 CAM_STREAM_TYPE_RAW,
2524 gCamCapability[mCameraId]->color_arrangement);
2525 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2526 mChannelHandle,
2527 mCameraHandle->ops,
2528 rawSize,
2529 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002530 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002531 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002532 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002533 if (!mHdrPlusRawSrcChannel) {
2534 LOGE("HDR+ Raw Source channel cannot be created");
2535 pthread_mutex_unlock(&mMutex);
2536 return -ENOMEM;
2537 }
2538 }
2539 }
2540
Thierry Strudel3d639192016-09-09 11:52:26 -07002541 if (mAnalysisChannel) {
2542 cam_analysis_info_t analysisInfo;
2543 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2544 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2545 CAM_STREAM_TYPE_ANALYSIS;
2546 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2547 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002548 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002549 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2550 &analysisInfo);
2551 if (rc != NO_ERROR) {
2552 LOGE("getAnalysisInfo failed, ret = %d", rc);
2553 pthread_mutex_unlock(&mMutex);
2554 return rc;
2555 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002556 cam_color_filter_arrangement_t analysis_color_arrangement =
2557 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2558 CAM_FILTER_ARRANGEMENT_Y :
2559 gCamCapability[mCameraId]->color_arrangement);
2560 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2561 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2562 analysis_color_arrangement);
2563
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002565 mCommon.getMatchingDimension(previewSize,
2566 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002567 mStreamConfigInfo.num_streams++;
2568 }
2569
Thierry Strudel2896d122017-02-23 19:18:03 -08002570 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 cam_analysis_info_t supportInfo;
2572 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2573 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2574 setPAAFSupport(callbackFeatureMask,
2575 CAM_STREAM_TYPE_CALLBACK,
2576 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002577 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002578 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002579 if (ret != NO_ERROR) {
2580 /* Ignore the error for Mono camera
2581 * because the PAAF bit mask is only set
2582 * for CAM_STREAM_TYPE_ANALYSIS stream type
2583 */
2584 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2585 LOGW("getAnalysisInfo failed, ret = %d", ret);
2586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002587 }
2588 mSupportChannel = new QCamera3SupportChannel(
2589 mCameraHandle->camera_handle,
2590 mChannelHandle,
2591 mCameraHandle->ops,
2592 &gCamCapability[mCameraId]->padding_info,
2593 callbackFeatureMask,
2594 CAM_STREAM_TYPE_CALLBACK,
2595 &QCamera3SupportChannel::kDim,
2596 CAM_FORMAT_YUV_420_NV21,
2597 supportInfo.hw_analysis_supported,
2598 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002599 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 if (!mSupportChannel) {
2601 LOGE("dummy channel cannot be created");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 }
2606
2607 if (mSupportChannel) {
2608 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2609 QCamera3SupportChannel::kDim;
2610 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2611 CAM_STREAM_TYPE_CALLBACK;
2612 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2613 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2614 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2615 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2616 gCamCapability[mCameraId]->color_arrangement);
2617 mStreamConfigInfo.num_streams++;
2618 }
2619
2620 if (mRawDumpChannel) {
2621 cam_dimension_t rawSize;
2622 rawSize = getMaxRawSize(mCameraId);
2623 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2624 rawSize;
2625 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2626 CAM_STREAM_TYPE_RAW;
2627 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2628 CAM_QCOM_FEATURE_NONE;
2629 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2630 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2631 gCamCapability[mCameraId]->color_arrangement);
2632 mStreamConfigInfo.num_streams++;
2633 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002634
2635 if (mHdrPlusRawSrcChannel) {
2636 cam_dimension_t rawSize;
2637 rawSize = getMaxRawSize(mCameraId);
2638 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2639 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2640 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2641 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2642 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2643 gCamCapability[mCameraId]->color_arrangement);
2644 mStreamConfigInfo.num_streams++;
2645 }
2646
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 /* In HFR mode, if video stream is not added, create a dummy channel so that
2648 * ISP can create a batch mode even for preview only case. This channel is
2649 * never 'start'ed (no stream-on), it is only 'initialized' */
2650 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2651 !m_bIsVideo) {
2652 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2653 setPAAFSupport(dummyFeatureMask,
2654 CAM_STREAM_TYPE_VIDEO,
2655 gCamCapability[mCameraId]->color_arrangement);
2656 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2657 mChannelHandle,
2658 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002659 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 this,
2661 &mDummyBatchStream,
2662 CAM_STREAM_TYPE_VIDEO,
2663 dummyFeatureMask,
2664 mMetadataChannel);
2665 if (NULL == mDummyBatchChannel) {
2666 LOGE("creation of mDummyBatchChannel failed."
2667 "Preview will use non-hfr sensor mode ");
2668 }
2669 }
2670 if (mDummyBatchChannel) {
2671 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2672 mDummyBatchStream.width;
2673 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2674 mDummyBatchStream.height;
2675 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2676 CAM_STREAM_TYPE_VIDEO;
2677 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2678 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2679 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2680 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2681 gCamCapability[mCameraId]->color_arrangement);
2682 mStreamConfigInfo.num_streams++;
2683 }
2684
2685 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
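    /* max_buffers selection (see the ternary below): 0 for 4K video,
     * MAX_VIDEO_BUFFERS when EIS 3.0 is enabled, MAX_INFLIGHT_REQUESTS otherwise. */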
2686 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002687 m_bIs4KVideo ? 0 :
2688 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002689
2690 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2691 for (pendingRequestIterator i = mPendingRequestsList.begin();
2692 i != mPendingRequestsList.end();) {
2693 i = erasePendingRequest(i);
2694 }
2695 mPendingFrameDropList.clear();
2696 // Initialize/Reset the pending buffers list
2697 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2698 req.mPendingBufferList.clear();
2699 }
2700 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2701
Thierry Strudel3d639192016-09-09 11:52:26 -07002702 mCurJpegMeta.clear();
2703 //Get min frame duration for this streams configuration
2704 deriveMinFrameDuration();
2705
Chien-Yu Chenee335912017-02-09 17:53:20 -08002706 mFirstPreviewIntentSeen = false;
2707
2708     // Disable HDR+ if it's enabled
2709 disableHdrPlusModeLocked();
2710
Thierry Strudel3d639192016-09-09 11:52:26 -07002711 // Update state
2712 mState = CONFIGURED;
2713
2714 pthread_mutex_unlock(&mMutex);
2715
2716 return rc;
2717}
2718
2719/*===========================================================================
2720 * FUNCTION : validateCaptureRequest
2721 *
2722 * DESCRIPTION: validate a capture request from camera service
2723 *
2724 * PARAMETERS :
2725 * @request : request from framework to process
2726 *
2727 * RETURN :
2728 *
2729 *==========================================================================*/
2730int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002731 camera3_capture_request_t *request,
2732 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002733{
2734 ssize_t idx = 0;
2735 const camera3_stream_buffer_t *b;
2736 CameraMetadata meta;
2737
2738 /* Sanity check the request */
2739 if (request == NULL) {
2740 LOGE("NULL capture request");
2741 return BAD_VALUE;
2742 }
2743
2744 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2745 /*settings cannot be null for the first request*/
2746 return BAD_VALUE;
2747 }
2748
2749 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002750 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2751 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 LOGE("Request %d: No output buffers provided!",
2753                 frameNumber);
2754 return BAD_VALUE;
2755 }
2756 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2757         LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
2758 request->num_output_buffers, MAX_NUM_STREAMS);
2759 return BAD_VALUE;
2760 }
2761 if (request->input_buffer != NULL) {
2762 b = request->input_buffer;
2763 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2764 LOGE("Request %d: Buffer %ld: Status not OK!",
2765 frameNumber, (long)idx);
2766 return BAD_VALUE;
2767 }
2768 if (b->release_fence != -1) {
2769 LOGE("Request %d: Buffer %ld: Has a release fence!",
2770 frameNumber, (long)idx);
2771 return BAD_VALUE;
2772 }
2773 if (b->buffer == NULL) {
2774 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2775 frameNumber, (long)idx);
2776 return BAD_VALUE;
2777 }
2778 }
2779
2780 // Validate all buffers
2781 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002782 if (b == NULL) {
2783 return BAD_VALUE;
2784 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002785 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002786 QCamera3ProcessingChannel *channel =
2787 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2788 if (channel == NULL) {
2789 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2790 frameNumber, (long)idx);
2791 return BAD_VALUE;
2792 }
2793 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2794 LOGE("Request %d: Buffer %ld: Status not OK!",
2795 frameNumber, (long)idx);
2796 return BAD_VALUE;
2797 }
2798 if (b->release_fence != -1) {
2799 LOGE("Request %d: Buffer %ld: Has a release fence!",
2800 frameNumber, (long)idx);
2801 return BAD_VALUE;
2802 }
2803 if (b->buffer == NULL) {
2804 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2805 frameNumber, (long)idx);
2806 return BAD_VALUE;
2807 }
2808 if (*(b->buffer) == NULL) {
2809 LOGE("Request %d: Buffer %ld: NULL private handle!",
2810 frameNumber, (long)idx);
2811 return BAD_VALUE;
2812 }
2813 idx++;
2814 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002815 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002816 return NO_ERROR;
2817}
2818
2819/*===========================================================================
2820 * FUNCTION : deriveMinFrameDuration
2821 *
2822  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2823 * on currently configured streams.
2824 *
2825 * PARAMETERS : NONE
2826 *
2827 * RETURN : NONE
2828 *
2829 *==========================================================================*/
2830void QCamera3HardwareInterface::deriveMinFrameDuration()
2831{
2832 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2833
2834 maxJpegDim = 0;
2835 maxProcessedDim = 0;
2836 maxRawDim = 0;
2837
2838 // Figure out maximum jpeg, processed, and raw dimensions
2839 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2840 it != mStreamInfo.end(); it++) {
2841
2842 // Input stream doesn't have valid stream_type
2843 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2844 continue;
2845
2846 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2847 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2848 if (dimension > maxJpegDim)
2849 maxJpegDim = dimension;
2850 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2851 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2852 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2853 if (dimension > maxRawDim)
2854 maxRawDim = dimension;
2855 } else {
2856 if (dimension > maxProcessedDim)
2857 maxProcessedDim = dimension;
2858 }
2859 }
2860
2861 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2862 MAX_SIZES_CNT);
2863
2864 //Assume all jpeg dimensions are in processed dimensions.
2865 if (maxJpegDim > maxProcessedDim)
2866 maxProcessedDim = maxJpegDim;
2867     //Find the smallest raw dimension that is greater than or equal to the max processed dimension
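    //For example (illustrative numbers): with a largest processed/JPEG stream of
    //1920x1080 (~2.1MP) and sensor raw sizes of 2MP and 12MP, the 12MP entry is
    //the smallest raw dimension that still covers the processed size, so its
    //minimum duration is picked up in the loop below.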
2868 if (maxProcessedDim > maxRawDim) {
2869 maxRawDim = INT32_MAX;
2870
2871 for (size_t i = 0; i < count; i++) {
2872 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2873 gCamCapability[mCameraId]->raw_dim[i].height;
2874 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2875 maxRawDim = dimension;
2876 }
2877 }
2878
2879 //Find minimum durations for processed, jpeg, and raw
2880 for (size_t i = 0; i < count; i++) {
2881 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2882 gCamCapability[mCameraId]->raw_dim[i].height) {
2883 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2884 break;
2885 }
2886 }
2887 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2888 for (size_t i = 0; i < count; i++) {
2889 if (maxProcessedDim ==
2890 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2891 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2892 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2893 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2894 break;
2895 }
2896 }
2897}
2898
2899/*===========================================================================
2900 * FUNCTION : getMinFrameDuration
2901 *
2902  * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
2903 * and current request configuration.
2904 *
2905  * PARAMETERS : @request: request sent by the framework
2906 *
2907  * RETURN : minimum frame duration for a particular request
2908 *
2909 *==========================================================================*/
2910int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2911{
2912 bool hasJpegStream = false;
2913 bool hasRawStream = false;
2914 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2915 const camera3_stream_t *stream = request->output_buffers[i].stream;
2916 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2917 hasJpegStream = true;
2918 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2919 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2920 stream->format == HAL_PIXEL_FORMAT_RAW16)
2921 hasRawStream = true;
2922 }
2923
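    // Illustration: a preview-only request is bounded by the slower of the raw and
    // processed minimum durations; once a BLOB (JPEG) buffer is part of the request,
    // the JPEG minimum duration is factored in as well.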
2924 if (!hasJpegStream)
2925 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2926 else
2927 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2928}
2929
2930/*===========================================================================
2931 * FUNCTION : handleBuffersDuringFlushLock
2932 *
2933 * DESCRIPTION: Account for buffers returned from back-end during flush
2934 * This function is executed while mMutex is held by the caller.
2935 *
2936 * PARAMETERS :
2937 * @buffer: image buffer for the callback
2938 *
2939 * RETURN :
2940 *==========================================================================*/
2941void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2942{
2943 bool buffer_found = false;
2944 for (List<PendingBuffersInRequest>::iterator req =
2945 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2946 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2947 for (List<PendingBufferInfo>::iterator i =
2948 req->mPendingBufferList.begin();
2949 i != req->mPendingBufferList.end(); i++) {
2950 if (i->buffer == buffer->buffer) {
2951 mPendingBuffersMap.numPendingBufsAtFlush--;
2952 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2953 buffer->buffer, req->frame_number,
2954 mPendingBuffersMap.numPendingBufsAtFlush);
2955 buffer_found = true;
2956 break;
2957 }
2958 }
2959 if (buffer_found) {
2960 break;
2961 }
2962 }
2963 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2964 //signal the flush()
2965 LOGD("All buffers returned to HAL. Continue flush");
2966 pthread_cond_signal(&mBuffersCond);
2967 }
2968}
2969
Thierry Strudel3d639192016-09-09 11:52:26 -07002970/*===========================================================================
2971 * FUNCTION : handleBatchMetadata
2972 *
2973 * DESCRIPTION: Handles metadata buffer callback in batch mode
2974 *
2975 * PARAMETERS : @metadata_buf: metadata buffer
2976 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2977 * the meta buf in this method
2978 *
2979 * RETURN :
2980 *
2981 *==========================================================================*/
2982void QCamera3HardwareInterface::handleBatchMetadata(
2983 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2984{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002985 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002986
2987 if (NULL == metadata_buf) {
2988 LOGE("metadata_buf is NULL");
2989 return;
2990 }
2991     /* In batch mode, the metadata will contain the frame number and timestamp of
2992 * the last frame in the batch. Eg: a batch containing buffers from request
2993 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2994      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2995 * multiple process_capture_results */
2996 metadata_buffer_t *metadata =
2997 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2998 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2999 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3000 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3001 uint32_t frame_number = 0, urgent_frame_number = 0;
3002 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3003 bool invalid_metadata = false;
3004 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3005 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003006 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003007
3008 int32_t *p_frame_number_valid =
3009 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3010 uint32_t *p_frame_number =
3011 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3012 int64_t *p_capture_time =
3013 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3014 int32_t *p_urgent_frame_number_valid =
3015 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3016 uint32_t *p_urgent_frame_number =
3017 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3018
3019 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3020 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3021 (NULL == p_urgent_frame_number)) {
3022 LOGE("Invalid metadata");
3023 invalid_metadata = true;
3024 } else {
3025 frame_number_valid = *p_frame_number_valid;
3026 last_frame_number = *p_frame_number;
3027 last_frame_capture_time = *p_capture_time;
3028 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3029 last_urgent_frame_number = *p_urgent_frame_number;
3030 }
3031
3032     /* In batch mode, when no video buffers are requested, set_parms are sent
3033 * for every capture_request. The difference between consecutive urgent
3034 * frame numbers and frame numbers should be used to interpolate the
3035 * corresponding frame numbers and time stamps */
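    /* Worked example (illustrative): if the batch maps first_frame_number 4 to
     * last_frame_number 7, then frameNumDiff = 7 + 1 - 4 = 4, so frame numbers
     * 4..7 are regenerated one per loop iteration below, with timestamps spaced
     * NSEC_PER_SEC/mHFRVideoFps apart and the last one equal to the batch
     * timestamp. */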
3036 pthread_mutex_lock(&mMutex);
3037 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003038 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3039 if(idx < 0) {
3040 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3041 last_urgent_frame_number);
3042 mState = ERROR;
3043 pthread_mutex_unlock(&mMutex);
3044 return;
3045 }
3046 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003047 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3048 first_urgent_frame_number;
3049
3050 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3051 urgent_frame_number_valid,
3052 first_urgent_frame_number, last_urgent_frame_number);
3053 }
3054
3055 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003056 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3057 if(idx < 0) {
3058 LOGE("Invalid frame number received: %d. Irrecoverable error",
3059 last_frame_number);
3060 mState = ERROR;
3061 pthread_mutex_unlock(&mMutex);
3062 return;
3063 }
3064 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003065 frameNumDiff = last_frame_number + 1 -
3066 first_frame_number;
3067 mPendingBatchMap.removeItem(last_frame_number);
3068
3069 LOGD("frm: valid: %d frm_num: %d - %d",
3070 frame_number_valid,
3071 first_frame_number, last_frame_number);
3072
3073 }
3074 pthread_mutex_unlock(&mMutex);
3075
3076 if (urgent_frame_number_valid || frame_number_valid) {
3077 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3078 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3079 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3080 urgentFrameNumDiff, last_urgent_frame_number);
3081 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3082 LOGE("frameNumDiff: %d frameNum: %d",
3083 frameNumDiff, last_frame_number);
3084 }
3085
3086 for (size_t i = 0; i < loopCount; i++) {
3087 /* handleMetadataWithLock is called even for invalid_metadata for
3088 * pipeline depth calculation */
3089 if (!invalid_metadata) {
3090 /* Infer frame number. Batch metadata contains frame number of the
3091 * last frame */
3092 if (urgent_frame_number_valid) {
3093 if (i < urgentFrameNumDiff) {
3094 urgent_frame_number =
3095 first_urgent_frame_number + i;
3096 LOGD("inferred urgent frame_number: %d",
3097 urgent_frame_number);
3098 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3099 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3100 } else {
3101 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3102 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3103 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3104 }
3105 }
3106
3107 /* Infer frame number. Batch metadata contains frame number of the
3108 * last frame */
3109 if (frame_number_valid) {
3110 if (i < frameNumDiff) {
3111 frame_number = first_frame_number + i;
3112 LOGD("inferred frame_number: %d", frame_number);
3113 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3114 CAM_INTF_META_FRAME_NUMBER, frame_number);
3115 } else {
3116 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3117 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3118 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3119 }
3120 }
3121
3122 if (last_frame_capture_time) {
3123 //Infer timestamp
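                /* The batch timestamp belongs to the last frame in the batch, so
                 * walk back (loopCount - 1) frame intervals of
                 * NSEC_PER_SEC/mHFRVideoFps to reach the first frame's time, then
                 * step forward i intervals for the current iteration. */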
3124 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003125 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003126 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003127 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003128 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3129 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3130 LOGD("batch capture_time: %lld, capture_time: %lld",
3131 last_frame_capture_time, capture_time);
3132 }
3133 }
3134 pthread_mutex_lock(&mMutex);
3135 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003136 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003137 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3138 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003139                 &is_metabuf_queued /* whether the metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003140 pthread_mutex_unlock(&mMutex);
3141 }
3142
3143 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003144 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003145 mMetadataChannel->bufDone(metadata_buf);
3146 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003147 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003148 }
3149}
3150
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003151void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3152 camera3_error_msg_code_t errorCode)
3153{
3154 camera3_notify_msg_t notify_msg;
3155 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3156 notify_msg.type = CAMERA3_MSG_ERROR;
3157 notify_msg.message.error.error_code = errorCode;
3158 notify_msg.message.error.error_stream = NULL;
3159 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003160 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003161
3162 return;
3163}
Thierry Strudel3d639192016-09-09 11:52:26 -07003164/*===========================================================================
3165 * FUNCTION : handleMetadataWithLock
3166 *
3167 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3168 *
3169 * PARAMETERS : @metadata_buf: metadata buffer
3170 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3171 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003172 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3173 * last urgent metadata in a batch. Always true for non-batch mode
3174 * @lastMetadataInBatch: Boolean to indicate whether this is the
3175 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003176 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3177 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003178 *
3179 * RETURN :
3180 *
3181 *==========================================================================*/
3182void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003183 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003184 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3185 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003186{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003187 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003188 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3189 //during flush do not send metadata from this thread
3190 LOGD("not sending metadata during flush or when mState is error");
3191 if (free_and_bufdone_meta_buf) {
3192 mMetadataChannel->bufDone(metadata_buf);
3193 free(metadata_buf);
3194 }
3195 return;
3196 }
3197
3198 //not in flush
3199 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3200 int32_t frame_number_valid, urgent_frame_number_valid;
3201 uint32_t frame_number, urgent_frame_number;
3202 int64_t capture_time;
3203 nsecs_t currentSysTime;
3204
3205 int32_t *p_frame_number_valid =
3206 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3207 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3208 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3209 int32_t *p_urgent_frame_number_valid =
3210 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3211 uint32_t *p_urgent_frame_number =
3212 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3213 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3214 metadata) {
3215 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3216 *p_frame_number_valid, *p_frame_number);
3217 }
3218
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003219 camera_metadata_t *resultMetadata = nullptr;
3220
Thierry Strudel3d639192016-09-09 11:52:26 -07003221 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3222 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3223 LOGE("Invalid metadata");
3224 if (free_and_bufdone_meta_buf) {
3225 mMetadataChannel->bufDone(metadata_buf);
3226 free(metadata_buf);
3227 }
3228 goto done_metadata;
3229 }
3230 frame_number_valid = *p_frame_number_valid;
3231 frame_number = *p_frame_number;
3232 capture_time = *p_capture_time;
3233 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3234 urgent_frame_number = *p_urgent_frame_number;
3235 currentSysTime = systemTime(CLOCK_MONOTONIC);
3236
3237 // Detect if buffers from any requests are overdue
3238 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003239 int64_t timeout;
3240 {
3241 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3242 // If there is a pending HDR+ request, the following requests may be blocked until the
3243 // HDR+ request is done. So allow a longer timeout.
3244 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3245 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3246 }
3247
3248 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003249 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003250 assert(missed.stream->priv);
3251 if (missed.stream->priv) {
3252 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3253 assert(ch->mStreams[0]);
3254 if (ch->mStreams[0]) {
3255 LOGE("Cancel missing frame = %d, buffer = %p,"
3256 "stream type = %d, stream format = %d",
3257 req.frame_number, missed.buffer,
3258 ch->mStreams[0]->getMyType(), missed.stream->format);
3259 ch->timeoutFrame(req.frame_number);
3260 }
3261 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003262 }
3263 }
3264 }
3265 //Partial result on process_capture_result for timestamp
3266 if (urgent_frame_number_valid) {
3267 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3268 urgent_frame_number, capture_time);
3269
3270             //Received an urgent frame number, handle it
3271 //using partial results
3272 for (pendingRequestIterator i =
3273 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3274 LOGD("Iterator Frame = %d urgent frame = %d",
3275 i->frame_number, urgent_frame_number);
3276
3277 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3278 (i->partial_result_cnt == 0)) {
3279 LOGE("Error: HAL missed urgent metadata for frame number %d",
3280 i->frame_number);
3281 }
3282
3283 if (i->frame_number == urgent_frame_number &&
3284 i->bUrgentReceived == 0) {
3285
3286 camera3_capture_result_t result;
3287 memset(&result, 0, sizeof(camera3_capture_result_t));
3288
3289 i->partial_result_cnt++;
3290 i->bUrgentReceived = 1;
3291 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003292 result.result = translateCbUrgentMetadataToResultMetadata(
3293 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003294 // Populate metadata result
3295 result.frame_number = urgent_frame_number;
3296 result.num_output_buffers = 0;
3297 result.output_buffers = NULL;
3298 result.partial_result = i->partial_result_cnt;
3299
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003300 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003301 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003302 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003303 result.partial_result == PARTIAL_RESULT_COUNT);
3304 }
3305
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003306 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003307 LOGD("urgent frame_number = %u, capture_time = %lld",
3308 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003309 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3310 // Instant AEC settled for this frame.
3311 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3312 mInstantAECSettledFrameNumber = urgent_frame_number;
3313 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003314 free_camera_metadata((camera_metadata_t *)result.result);
3315 break;
3316 }
3317 }
3318 }
3319
3320 if (!frame_number_valid) {
3321 LOGD("Not a valid normal frame number, used as SOF only");
3322 if (free_and_bufdone_meta_buf) {
3323 mMetadataChannel->bufDone(metadata_buf);
3324 free(metadata_buf);
3325 }
3326 goto done_metadata;
3327 }
3328 LOGH("valid frame_number = %u, capture_time = %lld",
3329 frame_number, capture_time);
3330
Emilian Peev7650c122017-01-19 08:24:33 -08003331 if (metadata->is_depth_data_valid) {
3332 handleDepthDataLocked(metadata->depth_data, frame_number);
3333 }
3334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003335     // Check whether any stream buffer corresponding to this frame is dropped.
3336     // If dropped, send an ERROR_BUFFER for the corresponding stream.
3337     // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3338 for (auto & pendingRequest : mPendingRequestsList) {
3339 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3340 mInstantAECSettledFrameNumber)) {
3341 camera3_notify_msg_t notify_msg = {};
3342 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003343 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003344 QCamera3ProcessingChannel *channel =
3345 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003346 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003347 if (p_cam_frame_drop) {
3348 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003349 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003350 // Got the stream ID for drop frame.
3351 dropFrame = true;
3352 break;
3353 }
3354 }
3355 } else {
3356 // This is instant AEC case.
3357                     // For instant AEC, drop the stream until AEC is settled.
3358 dropFrame = true;
3359 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003360
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003361 if (dropFrame) {
3362 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3363 if (p_cam_frame_drop) {
3364 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003365 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003366 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003367 } else {
3368 // For instant AEC, inform frame drop and frame number
3369 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3370 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003371 pendingRequest.frame_number, streamID,
3372 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003373 }
3374 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003375 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003376 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003377 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003378 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003379 if (p_cam_frame_drop) {
3380 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003381 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003382 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003383 } else {
3384 // For instant AEC, inform frame drop and frame number
3385 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3386 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 pendingRequest.frame_number, streamID,
3388 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003389 }
3390 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003391 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003392 PendingFrameDrop.stream_ID = streamID;
3393 // Add the Frame drop info to mPendingFrameDropList
3394 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003395 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003396 }
3397 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003398 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003399
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003400 for (auto & pendingRequest : mPendingRequestsList) {
3401 // Find the pending request with the frame number.
3402 if (pendingRequest.frame_number == frame_number) {
3403 // Update the sensor timestamp.
3404 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003405
Thierry Strudel3d639192016-09-09 11:52:26 -07003406
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003407 /* Set the timestamp in display metadata so that clients aware of
3408                private_handle such as VT can use these unmodified timestamps.
3409 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003410 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003411
Thierry Strudel3d639192016-09-09 11:52:26 -07003412 // Find channel requiring metadata, meaning internal offline postprocess
3413 // is needed.
3414 //TODO: for now, we don't support two streams requiring metadata at the same time.
3415             // (because we are not making copies, and the metadata buffer is not reference counted).
3416 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003417 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3418 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003419 if (iter->need_metadata) {
3420 internalPproc = true;
3421 QCamera3ProcessingChannel *channel =
3422 (QCamera3ProcessingChannel *)iter->stream->priv;
3423 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003424 if(p_is_metabuf_queued != NULL) {
3425 *p_is_metabuf_queued = true;
3426 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003427 break;
3428 }
3429 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003430 for (auto itr = pendingRequest.internalRequestList.begin();
3431 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003432 if (itr->need_metadata) {
3433 internalPproc = true;
3434 QCamera3ProcessingChannel *channel =
3435 (QCamera3ProcessingChannel *)itr->stream->priv;
3436 channel->queueReprocMetadata(metadata_buf);
3437 break;
3438 }
3439 }
3440
Thierry Strudel54dc9782017-02-15 12:12:10 -08003441 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003442 resultMetadata = translateFromHalMetadata(metadata,
3443 pendingRequest.timestamp, pendingRequest.request_id,
3444 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3445 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003446 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003447 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003448 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003449 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003450 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003451 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003452
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003453 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003454
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003455 if (pendingRequest.blob_request) {
3456 //Dump tuning metadata if enabled and available
3457 char prop[PROPERTY_VALUE_MAX];
3458 memset(prop, 0, sizeof(prop));
3459 property_get("persist.camera.dumpmetadata", prop, "0");
3460 int32_t enabled = atoi(prop);
3461 if (enabled && metadata->is_tuning_params_valid) {
3462 dumpMetadataToFile(metadata->tuning_params,
3463 mMetaFrameCount,
3464 enabled,
3465 "Snapshot",
3466 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 }
3468 }
3469
3470 if (!internalPproc) {
3471 LOGD("couldn't find need_metadata for this metadata");
3472 // Return metadata buffer
3473 if (free_and_bufdone_meta_buf) {
3474 mMetadataChannel->bufDone(metadata_buf);
3475 free(metadata_buf);
3476 }
3477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003478
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003479 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003480 }
3481 }
3482
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003483 // Try to send out shutter callbacks and capture results.
3484 handlePendingResultsWithLock(frame_number, resultMetadata);
3485 return;
3486
Thierry Strudel3d639192016-09-09 11:52:26 -07003487done_metadata:
3488 for (pendingRequestIterator i = mPendingRequestsList.begin();
3489 i != mPendingRequestsList.end() ;i++) {
3490 i->pipeline_depth++;
3491 }
3492 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3493 unblockRequestIfNecessary();
3494}
3495
3496/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003497 * FUNCTION : handleDepthDataWithLock
3498 *
3499 * DESCRIPTION: Handles incoming depth data
3500 *
3501 * PARAMETERS : @depthData : Depth data
3502 * @frameNumber: Frame number of the incoming depth data
3503 *
3504 * RETURN :
3505 *
3506 *==========================================================================*/
3507void QCamera3HardwareInterface::handleDepthDataLocked(
3508 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3509 uint32_t currentFrameNumber;
3510 buffer_handle_t *depthBuffer;
3511
3512 if (nullptr == mDepthChannel) {
3513 LOGE("Depth channel not present!");
3514 return;
3515 }
3516
3517 camera3_stream_buffer_t resultBuffer =
3518 {.acquire_fence = -1,
3519 .release_fence = -1,
3520 .status = CAMERA3_BUFFER_STATUS_OK,
3521 .buffer = nullptr,
3522 .stream = mDepthChannel->getStream()};
3523 camera3_capture_result_t result =
3524 {.result = nullptr,
3525 .num_output_buffers = 1,
3526 .output_buffers = &resultBuffer,
3527 .partial_result = 0,
3528 .frame_number = 0};
3529
3530 do {
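    /* Drain depth buffers in frame order: buffers older than the incoming frame
     * are returned with an error status, the buffer matching frameNumber is
     * populated with the depth data, and anything newer remains pending. */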
3531 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3532 if (nullptr == depthBuffer) {
3533 break;
3534 }
3535
3536 result.frame_number = currentFrameNumber;
3537 resultBuffer.buffer = depthBuffer;
3538 if (currentFrameNumber == frameNumber) {
3539 int32_t rc = mDepthChannel->populateDepthData(depthData,
3540 frameNumber);
3541 if (NO_ERROR != rc) {
3542 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3543 } else {
3544 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3545 }
3546 } else if (currentFrameNumber > frameNumber) {
3547 break;
3548 } else {
3549 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3550 {{currentFrameNumber, mDepthChannel->getStream(),
3551 CAMERA3_MSG_ERROR_BUFFER}}};
3552 orchestrateNotify(&notify_msg);
3553
3554 LOGE("Depth buffer for frame number: %d is missing "
3555                     "- returning buffer with error status!", currentFrameNumber);
3556 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3557 }
3558 mDepthChannel->unmapBuffer(currentFrameNumber);
3559
3560 orchestrateResult(&result);
3561 } while (currentFrameNumber < frameNumber);
3562}
3563
3564/*===========================================================================
3565 * FUNCTION : notifyErrorFoPendingDepthData
3566 *
3567 * DESCRIPTION: Returns error for any pending depth buffers
3568 *
3569 * PARAMETERS : depthCh - depth channel that needs to get flushed
3570 *
3571 * RETURN :
3572 *
3573 *==========================================================================*/
3574void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3575 QCamera3DepthChannel *depthCh) {
3576 uint32_t currentFrameNumber;
3577 buffer_handle_t *depthBuffer;
3578
3579 if (nullptr == depthCh) {
3580 return;
3581 }
3582
3583 camera3_notify_msg_t notify_msg =
3584 {.type = CAMERA3_MSG_ERROR,
3585 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3586 camera3_stream_buffer_t resultBuffer =
3587 {.acquire_fence = -1,
3588 .release_fence = -1,
3589 .buffer = nullptr,
3590 .stream = depthCh->getStream(),
3591 .status = CAMERA3_BUFFER_STATUS_ERROR};
3592 camera3_capture_result_t result =
3593 {.result = nullptr,
3594 .frame_number = 0,
3595 .num_output_buffers = 1,
3596 .partial_result = 0,
3597 .output_buffers = &resultBuffer};
3598
3599 while (nullptr !=
3600 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3601 depthCh->unmapBuffer(currentFrameNumber);
3602
3603 notify_msg.message.error.frame_number = currentFrameNumber;
3604 orchestrateNotify(&notify_msg);
3605
3606 resultBuffer.buffer = depthBuffer;
3607 result.frame_number = currentFrameNumber;
3608 orchestrateResult(&result);
3609 };
3610}
3611
3612/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003613 * FUNCTION : hdrPlusPerfLock
3614 *
3615 * DESCRIPTION: perf lock for HDR+ using custom intent
3616 *
3617 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3618 *
3619 * RETURN : None
3620 *
3621 *==========================================================================*/
3622void QCamera3HardwareInterface::hdrPlusPerfLock(
3623 mm_camera_super_buf_t *metadata_buf)
3624{
3625 if (NULL == metadata_buf) {
3626 LOGE("metadata_buf is NULL");
3627 return;
3628 }
3629 metadata_buffer_t *metadata =
3630 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3631 int32_t *p_frame_number_valid =
3632 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3633 uint32_t *p_frame_number =
3634 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3635
3636 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3637 LOGE("%s: Invalid metadata", __func__);
3638 return;
3639 }
3640
3641 //acquire perf lock for 5 sec after the last HDR frame is captured
3642 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3643 if ((p_frame_number != NULL) &&
3644 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003645 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003646 }
3647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003648}
3649
3650/*===========================================================================
3651 * FUNCTION : handleInputBufferWithLock
3652 *
3653 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3654 *
3655 * PARAMETERS : @frame_number: frame number of the input buffer
3656 *
3657 * RETURN :
3658 *
3659 *==========================================================================*/
3660void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3661{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003662 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 pendingRequestIterator i = mPendingRequestsList.begin();
3664 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3665 i++;
3666 }
3667 if (i != mPendingRequestsList.end() && i->input_buffer) {
3668 //found the right request
3669 if (!i->shutter_notified) {
3670 CameraMetadata settings;
3671 camera3_notify_msg_t notify_msg;
3672 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3673 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3674 if(i->settings) {
3675 settings = i->settings;
3676 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3677 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3678 } else {
3679 LOGE("No timestamp in input settings! Using current one.");
3680 }
3681 } else {
3682 LOGE("Input settings missing!");
3683 }
3684
3685 notify_msg.type = CAMERA3_MSG_SHUTTER;
3686 notify_msg.message.shutter.frame_number = frame_number;
3687 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003688 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 i->shutter_notified = true;
3690 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3691 i->frame_number, notify_msg.message.shutter.timestamp);
3692 }
3693
3694 if (i->input_buffer->release_fence != -1) {
3695 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3696 close(i->input_buffer->release_fence);
3697 if (rc != OK) {
3698 LOGE("input buffer sync wait failed %d", rc);
3699 }
3700 }
3701
3702 camera3_capture_result result;
3703 memset(&result, 0, sizeof(camera3_capture_result));
3704 result.frame_number = frame_number;
3705 result.result = i->settings;
3706 result.input_buffer = i->input_buffer;
3707 result.partial_result = PARTIAL_RESULT_COUNT;
3708
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003709 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 LOGD("Input request metadata and input buffer frame_number = %u",
3711 i->frame_number);
3712 i = erasePendingRequest(i);
3713 } else {
3714 LOGE("Could not find input request for frame number %d", frame_number);
3715 }
3716}
3717
3718/*===========================================================================
3719 * FUNCTION : handleBufferWithLock
3720 *
3721 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3722 *
3723 * PARAMETERS : @buffer: image buffer for the callback
3724 * @frame_number: frame number of the image buffer
3725 *
3726 * RETURN :
3727 *
3728 *==========================================================================*/
3729void QCamera3HardwareInterface::handleBufferWithLock(
3730 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3731{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003732 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003733
3734 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3735 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3736 }
3737
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 /* Nothing to be done during error state */
3739 if ((ERROR == mState) || (DEINIT == mState)) {
3740 return;
3741 }
3742 if (mFlushPerf) {
3743 handleBuffersDuringFlushLock(buffer);
3744 return;
3745 }
3746 //not in flush
3747 // If the frame number doesn't exist in the pending request list,
3748 // directly send the buffer to the frameworks, and update pending buffers map
3749 // Otherwise, book-keep the buffer.
3750 pendingRequestIterator i = mPendingRequestsList.begin();
3751 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3752 i++;
3753 }
3754 if (i == mPendingRequestsList.end()) {
3755 // Verify all pending requests frame_numbers are greater
3756 for (pendingRequestIterator j = mPendingRequestsList.begin();
3757 j != mPendingRequestsList.end(); j++) {
3758 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3759 LOGW("Error: pending live frame number %d is smaller than %d",
3760 j->frame_number, frame_number);
3761 }
3762 }
3763 camera3_capture_result_t result;
3764 memset(&result, 0, sizeof(camera3_capture_result_t));
3765 result.result = NULL;
3766 result.frame_number = frame_number;
3767 result.num_output_buffers = 1;
3768 result.partial_result = 0;
3769 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3770 m != mPendingFrameDropList.end(); m++) {
3771 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3772 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3773 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3774 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3775 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3776 frame_number, streamID);
3777 m = mPendingFrameDropList.erase(m);
3778 break;
3779 }
3780 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003781 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003782 result.output_buffers = buffer;
3783 LOGH("result frame_number = %d, buffer = %p",
3784 frame_number, buffer->buffer);
3785
3786 mPendingBuffersMap.removeBuf(buffer->buffer);
3787
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003788 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003789 } else {
3790 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003791 if (i->input_buffer->release_fence != -1) {
3792 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3793 close(i->input_buffer->release_fence);
3794 if (rc != OK) {
3795 LOGE("input buffer sync wait failed %d", rc);
3796 }
3797 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003798 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003799
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003800 // Put buffer into the pending request
3801 for (auto &requestedBuffer : i->buffers) {
3802 if (requestedBuffer.stream == buffer->stream) {
3803 if (requestedBuffer.buffer != nullptr) {
3804 LOGE("Error: buffer is already set");
3805 } else {
3806 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3807 sizeof(camera3_stream_buffer_t));
3808 *(requestedBuffer.buffer) = *buffer;
3809 LOGH("cache buffer %p at result frame_number %u",
3810 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003811 }
3812 }
3813 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814
3815 if (i->input_buffer) {
3816 // For a reprocessing request, try to send out shutter callback and result metadata.
3817 handlePendingResultsWithLock(frame_number, nullptr);
3818 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003819 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003820
3821 if (mPreviewStarted == false) {
3822 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3823 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3824 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3825 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3826 mPreviewStarted = true;
3827
3828 // Set power hint for preview
3829 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3830 }
3831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832}
3833
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3835 const camera_metadata_t *resultMetadata)
3836{
3837 // Find the pending request for this result metadata.
3838 auto requestIter = mPendingRequestsList.begin();
3839 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3840 requestIter++;
3841 }
3842
3843 if (requestIter == mPendingRequestsList.end()) {
3844 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3845 return;
3846 }
3847
3848 // Update the result metadata
3849 requestIter->resultMetadata = resultMetadata;
3850
3851 // Check what type of request this is.
3852 bool liveRequest = false;
3853 if (requestIter->hdrplus) {
3854 // HDR+ request doesn't have partial results.
3855 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3856 } else if (requestIter->input_buffer != nullptr) {
3857 // Reprocessing request result is the same as settings.
3858 requestIter->resultMetadata = requestIter->settings;
3859 // Reprocessing request doesn't have partial results.
3860 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3861 } else {
3862 liveRequest = true;
3863 requestIter->partial_result_cnt++;
3864 mPendingLiveRequest--;
3865
3866 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003867 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3868 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3870 }
3871 }
3872
3873 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3874 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3875 bool readyToSend = true;
3876
3877 // Iterate through the pending requests to send out shutter callbacks and results that are
3878 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3879 // live requests that don't have result metadata yet.
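    // Example: if live requests 10 and 11 are both pending and 11's metadata arrives
    // first, request 10 is completed with CAMERA3_MSG_ERROR_RESULT below, while 11's
    // shutter and result are held back until all earlier requests have been cleared.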
3880 auto iter = mPendingRequestsList.begin();
3881 while (iter != mPendingRequestsList.end()) {
3882 // Check if current pending request is ready. If it's not ready, the following pending
3883 // requests are also not ready.
3884 if (readyToSend && iter->resultMetadata == nullptr) {
3885 readyToSend = false;
3886 }
3887
3888 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3889
3890 std::vector<camera3_stream_buffer_t> outputBuffers;
3891
3892 camera3_capture_result_t result = {};
3893 result.frame_number = iter->frame_number;
3894 result.result = iter->resultMetadata;
3895 result.partial_result = iter->partial_result_cnt;
3896
3897 // If this pending buffer has result metadata, we may be able to send out shutter callback
3898 // and result metadata.
3899 if (iter->resultMetadata != nullptr) {
3900 if (!readyToSend) {
3901                 // If any of the previous pending requests is not ready, this pending request
3902                 // is also held back, so that shutter callbacks and result metadata are
3903                 // delivered in order.
3904 iter++;
3905 continue;
3906 }
3907
3908 // Invoke shutter callback if not yet.
3909 if (!iter->shutter_notified) {
3910 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3911
3912 // Find the timestamp in HDR+ result metadata
3913 camera_metadata_ro_entry_t entry;
3914 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3915 ANDROID_SENSOR_TIMESTAMP, &entry);
3916 if (res != OK) {
3917 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3918 __FUNCTION__, iter->frame_number, strerror(-res), res);
3919 } else {
3920 timestamp = entry.data.i64[0];
3921 }
3922
3923 camera3_notify_msg_t notify_msg = {};
3924 notify_msg.type = CAMERA3_MSG_SHUTTER;
3925 notify_msg.message.shutter.frame_number = iter->frame_number;
3926 notify_msg.message.shutter.timestamp = timestamp;
3927 orchestrateNotify(&notify_msg);
3928 iter->shutter_notified = true;
3929 }
3930
3931 result.input_buffer = iter->input_buffer;
3932
3933 // Prepare output buffer array
3934 for (auto bufferInfoIter = iter->buffers.begin();
3935 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3936 if (bufferInfoIter->buffer != nullptr) {
3937
3938 QCamera3Channel *channel =
3939 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3940 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3941
3942 // Check if this buffer is a dropped frame.
3943 auto frameDropIter = mPendingFrameDropList.begin();
3944 while (frameDropIter != mPendingFrameDropList.end()) {
3945 if((frameDropIter->stream_ID == streamID) &&
3946 (frameDropIter->frame_number == frameNumber)) {
3947 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3948 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3949 streamID);
3950 mPendingFrameDropList.erase(frameDropIter);
3951 break;
3952 } else {
3953 frameDropIter++;
3954 }
3955 }
3956
3957 // Check buffer error status
3958 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3959 bufferInfoIter->buffer->buffer);
3960 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3961
3962 outputBuffers.push_back(*(bufferInfoIter->buffer));
3963 free(bufferInfoIter->buffer);
3964 bufferInfoIter->buffer = NULL;
3965 }
3966 }
3967
3968 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3969 result.num_output_buffers = outputBuffers.size();
3970 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3971 // If the result metadata belongs to a live request, notify errors for previous pending
3972 // live requests.
3973 mPendingLiveRequest--;
3974
3975 CameraMetadata dummyMetadata;
3976 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3977 result.result = dummyMetadata.release();
3978
3979 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3980 } else {
3981 iter++;
3982 continue;
3983 }
3984
3985 orchestrateResult(&result);
3986
3987 // For reprocessing, result metadata is the same as settings so do not free it here to
3988 // avoid double free.
3989 if (result.result != iter->settings) {
3990 free_camera_metadata((camera_metadata_t *)result.result);
3991 }
3992 iter->resultMetadata = nullptr;
3993 iter = erasePendingRequest(iter);
3994 }
3995
3996 if (liveRequest) {
3997 for (auto &iter : mPendingRequestsList) {
3998 // Increment pipeline depth for the following pending requests.
3999 if (iter.frame_number > frameNumber) {
4000 iter.pipeline_depth++;
4001 }
4002 }
4003 }
4004
4005 unblockRequestIfNecessary();
4006}
4007
Thierry Strudel3d639192016-09-09 11:52:26 -07004008/*===========================================================================
4009 * FUNCTION : unblockRequestIfNecessary
4010 *
4011 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4012 * that mMutex is held when this function is called.
4013 *
4014 * PARAMETERS :
4015 *
4016 * RETURN :
4017 *
4018 *==========================================================================*/
4019void QCamera3HardwareInterface::unblockRequestIfNecessary()
4020{
4021 // Unblock process_capture_request
4022 pthread_cond_signal(&mRequestCond);
4023}
4024
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004025/*===========================================================================
4026 * FUNCTION : isHdrSnapshotRequest
4027 *
4028 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4029 *
4030 * PARAMETERS : camera3 request structure
4031 *
4032 * RETURN : boolean decision variable
4033 *
4034 *==========================================================================*/
4035bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4036{
4037 if (request == NULL) {
4038 LOGE("Invalid request handle");
4039 assert(0);
4040 return false;
4041 }
4042
4043 if (!mForceHdrSnapshot) {
4044 CameraMetadata frame_settings;
4045 frame_settings = request->settings;
4046
4047 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4048 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4049 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4050 return false;
4051 }
4052 } else {
4053 return false;
4054 }
4055
4056 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4057 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4058 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4059 return false;
4060 }
4061 } else {
4062 return false;
4063 }
4064 }
4065
4066 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4067 if (request->output_buffers[i].stream->format
4068 == HAL_PIXEL_FORMAT_BLOB) {
4069 return true;
4070 }
4071 }
4072
4073 return false;
4074}
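/* Illustrative example (assumption, not code from this HAL): settings that
 * make isHdrSnapshotRequest() return true when mForceHdrSnapshot is off,
 * provided the request also contains at least one BLOB (JPEG) output buffer.
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *     request->settings = settings.getAndLock();
 */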
4075/*===========================================================================
4076 * FUNCTION : orchestrateRequest
4077 *
4078 * DESCRIPTION: Orchestrates a capture request from camera service
4079 *
4080 * PARAMETERS :
4081 * @request : request from framework to process
4082 *
4083 * RETURN : Error status codes
4084 *
4085 *==========================================================================*/
4086int32_t QCamera3HardwareInterface::orchestrateRequest(
4087 camera3_capture_request_t *request)
4088{
4089
4090 uint32_t originalFrameNumber = request->frame_number;
4091 uint32_t originalOutputCount = request->num_output_buffers;
4092 const camera_metadata_t *original_settings = request->settings;
4093 List<InternalRequest> internallyRequestedStreams;
4094 List<InternalRequest> emptyInternalList;
4095
4096 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4097 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4098 uint32_t internalFrameNumber;
4099 CameraMetadata modified_meta;
4100
4101
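        /* Summary of the bracketing sequence below: for each exposure step the
         * HAL first issues a metering-only internal request so AE can settle,
         * then a capture. Only the first capture (at GB_HDR_HALF_STEP_EV) is
         * mapped back to the framework frame number; the 0 EV and
         * GB_HDR_2X_STEP_EV captures use internal frame numbers and their
         * results are dropped in orchestrateResult(). */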
4102 /* Add Blob channel to list of internally requested streams */
4103 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4104 if (request->output_buffers[i].stream->format
4105 == HAL_PIXEL_FORMAT_BLOB) {
4106 InternalRequest streamRequested;
4107 streamRequested.meteringOnly = 1;
4108 streamRequested.need_metadata = 0;
4109 streamRequested.stream = request->output_buffers[i].stream;
4110 internallyRequestedStreams.push_back(streamRequested);
4111 }
4112 }
4113 request->num_output_buffers = 0;
4114 auto itr = internallyRequestedStreams.begin();
4115
4116 /* Modify setting to set compensation */
4117 modified_meta = request->settings;
4118 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4119 uint8_t aeLock = 1;
4120 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4121 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4122 camera_metadata_t *modified_settings = modified_meta.release();
4123 request->settings = modified_settings;
4124
4125 /* Capture Settling & -2x frame */
4126 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4127 request->frame_number = internalFrameNumber;
4128 processCaptureRequest(request, internallyRequestedStreams);
4129
4130 request->num_output_buffers = originalOutputCount;
4131 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4132 request->frame_number = internalFrameNumber;
4133 processCaptureRequest(request, emptyInternalList);
4134 request->num_output_buffers = 0;
4135
4136 modified_meta = modified_settings;
4137 expCompensation = 0;
4138 aeLock = 1;
4139 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4140 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4141 modified_settings = modified_meta.release();
4142 request->settings = modified_settings;
4143
4144 /* Capture Settling & 0X frame */
4145
4146 itr = internallyRequestedStreams.begin();
4147 if (itr == internallyRequestedStreams.end()) {
4148 LOGE("Error Internally Requested Stream list is empty");
4149 assert(0);
4150 } else {
4151 itr->need_metadata = 0;
4152 itr->meteringOnly = 1;
4153 }
4154
4155 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4156 request->frame_number = internalFrameNumber;
4157 processCaptureRequest(request, internallyRequestedStreams);
4158
4159 itr = internallyRequestedStreams.begin();
4160 if (itr == internallyRequestedStreams.end()) {
4161 ALOGE("Error Internally Requested Stream list is empty");
4162 assert(0);
4163 } else {
4164 itr->need_metadata = 1;
4165 itr->meteringOnly = 0;
4166 }
4167
4168 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4169 request->frame_number = internalFrameNumber;
4170 processCaptureRequest(request, internallyRequestedStreams);
4171
4172 /* Capture 2X frame*/
4173 modified_meta = modified_settings;
4174 expCompensation = GB_HDR_2X_STEP_EV;
4175 aeLock = 1;
4176 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4177 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4178 modified_settings = modified_meta.release();
4179 request->settings = modified_settings;
4180
4181 itr = internallyRequestedStreams.begin();
4182 if (itr == internallyRequestedStreams.end()) {
4183 ALOGE("Error Internally Requested Stream list is empty");
4184 assert(0);
4185 } else {
4186 itr->need_metadata = 0;
4187 itr->meteringOnly = 1;
4188 }
4189 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4190 request->frame_number = internalFrameNumber;
4191 processCaptureRequest(request, internallyRequestedStreams);
4192
4193 itr = internallyRequestedStreams.begin();
4194 if (itr == internallyRequestedStreams.end()) {
4195 ALOGE("Error Internally Requested Stream list is empty");
4196 assert(0);
4197 } else {
4198 itr->need_metadata = 1;
4199 itr->meteringOnly = 0;
4200 }
4201
4202 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4203 request->frame_number = internalFrameNumber;
4204 processCaptureRequest(request, internallyRequestedStreams);
4205
4206
4207 /* Capture 2X on original streaming config*/
4208 internallyRequestedStreams.clear();
4209
4210 /* Restore original settings pointer */
4211 request->settings = original_settings;
4212 } else {
4213 uint32_t internalFrameNumber;
4214 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4215 request->frame_number = internalFrameNumber;
4216 return processCaptureRequest(request, internallyRequestedStreams);
4217 }
4218
4219 return NO_ERROR;
4220}
4221
4222/*===========================================================================
4223 * FUNCTION : orchestrateResult
4224 *
4225 * DESCRIPTION: Orchestrates a capture result to camera service
4226 *
4227 * PARAMETERS :
4228 * @result : capture result to be sent to the framework
4229 *
4230 * RETURN :
4231 *
4232 *==========================================================================*/
4233void QCamera3HardwareInterface::orchestrateResult(
4234 camera3_capture_result_t *result)
4235{
4236 uint32_t frameworkFrameNumber;
4237 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4238 frameworkFrameNumber);
4239 if (rc != NO_ERROR) {
4240 LOGE("Cannot find translated frameworkFrameNumber");
4241 assert(0);
4242 } else {
4243 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004244 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004245 } else {
4246 result->frame_number = frameworkFrameNumber;
4247 mCallbackOps->process_capture_result(mCallbackOps, result);
4248 }
4249 }
4250}
4251
4252/*===========================================================================
4253 * FUNCTION : orchestrateNotify
4254 *
4255 * DESCRIPTION: Orchestrates a notify to camera service
4256 *
4257 * PARAMETERS :
4258 * @notify_msg : notify message to be sent to the framework
4259 *
4260 * RETURN :
4261 *
4262 *==========================================================================*/
4263void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4264{
4265 uint32_t frameworkFrameNumber;
4266 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004267 int32_t rc = NO_ERROR;
4268
4269 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004270 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004271
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004272 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004273 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4274 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4275 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004276 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004277 LOGE("Cannot find translated frameworkFrameNumber");
4278 assert(0);
4279 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004280 }
4281 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004282
4283 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4284 LOGD("Internal Request drop the notifyCb");
4285 } else {
4286 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4287 mCallbackOps->notify(mCallbackOps, notify_msg);
4288 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004289}
4290
4291/*===========================================================================
4292 * FUNCTION : FrameNumberRegistry
4293 *
4294 * DESCRIPTION: Constructor
4295 *
4296 * PARAMETERS :
4297 *
4298 * RETURN :
4299 *
4300 *==========================================================================*/
4301FrameNumberRegistry::FrameNumberRegistry()
4302{
4303 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4304}
4305
4306/*===========================================================================
4307 * FUNCTION : ~FrameNumberRegistry
4308 *
4309 * DESCRIPTION: Destructor
4310 *
4311 * PARAMETERS :
4312 *
4313 * RETURN :
4314 *
4315 *==========================================================================*/
4316FrameNumberRegistry::~FrameNumberRegistry()
4317{
4318}
4319
4320/*===========================================================================
4321 * FUNCTION : PurgeOldEntriesLocked
4322 *
4323 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4324 *
4325 * PARAMETERS :
4326 *
4327 * RETURN : NONE
4328 *
4329 *==========================================================================*/
4330void FrameNumberRegistry::purgeOldEntriesLocked()
4331{
4332 while (_register.begin() != _register.end()) {
4333 auto itr = _register.begin();
4334 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4335 _register.erase(itr);
4336 } else {
4337 return;
4338 }
4339 }
4340}
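// Note: _register is keyed by the internal frame number, which increases
// monotonically, so (assuming an ordered map) the oldest entries come first
// and the loop above can stop at the first entry that is still within
// FRAME_REGISTER_LRU_SIZE of _nextFreeInternalNumber.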
4341
4342/*===========================================================================
4343 * FUNCTION : allocStoreInternalFrameNumber
4344 *
4345 * DESCRIPTION: Method to record a framework request and associate a newly
4346 * generated internal frame number with it
4347 *
4348 * PARAMETERS :
4349 * @fFrameNumber: Identifier given by framework
4350 * @internalFN : Output parameter which will have the newly generated internal
4351 * entry
4352 *
4353 * RETURN : Error code
4354 *
4355 *==========================================================================*/
4356int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4357 uint32_t &internalFrameNumber)
4358{
4359 Mutex::Autolock lock(mRegistryLock);
4360 internalFrameNumber = _nextFreeInternalNumber++;
4361 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4362 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4363 purgeOldEntriesLocked();
4364 return NO_ERROR;
4365}
4366
4367/*===========================================================================
4368 * FUNCTION : generateStoreInternalFrameNumber
4369 *
4370 * DESCRIPTION: Method to generate a new internal frame number that is not
4371 * associated with any framework request
4372 *
4373 * PARAMETERS :
4374 * @internalFrameNumber: Output parameter which will hold the newly generated
4375 * internal frame number
4376 *
4377 * RETURN : Error code
4378 *
4379 *==========================================================================*/
4380int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4381{
4382 Mutex::Autolock lock(mRegistryLock);
4383 internalFrameNumber = _nextFreeInternalNumber++;
4384 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4385 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4386 purgeOldEntriesLocked();
4387 return NO_ERROR;
4388}
4389
4390/*===========================================================================
4391 * FUNCTION : getFrameworkFrameNumber
4392 *
4393 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4394 *
4395 * PARAMETERS :
4396 * @internalFrameNumber: Internal frame number reference
4397 * @frameworkFrameNumber: Output parameter holding the framework frame number
4398 *
4399 * RETURN : Error code
4400 *
4401 *==========================================================================*/
4402int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4403 uint32_t &frameworkFrameNumber)
4404{
4405 Mutex::Autolock lock(mRegistryLock);
4406 auto itr = _register.find(internalFrameNumber);
4407 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004408 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004409 return -ENOENT;
4410 }
4411
4412 frameworkFrameNumber = itr->second;
4413 purgeOldEntriesLocked();
4414 return NO_ERROR;
4415}
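/* Illustrative usage of FrameNumberRegistry (example only):
 *
 *     FrameNumberRegistry registry;
 *     uint32_t internalFN = 0, frameworkFN = 0;
 *     registry.allocStoreInternalFrameNumber(42, internalFN);    // framework #42 -> new internal #
 *     registry.getFrameworkFrameNumber(internalFN, frameworkFN); // frameworkFN == 42
 *     registry.generateStoreInternalFrameNumber(internalFN);     // internal-only entry
 *     registry.getFrameworkFrameNumber(internalFN, frameworkFN); // == EMPTY_FRAMEWORK_FRAME_NUMBER
 */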
Thierry Strudel3d639192016-09-09 11:52:26 -07004416
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004417status_t QCamera3HardwareInterface::fillPbStreamConfig(
4418 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4419 QCamera3Channel *channel, uint32_t streamIndex) {
4420 if (config == nullptr) {
4421 LOGE("%s: config is null", __FUNCTION__);
4422 return BAD_VALUE;
4423 }
4424
4425 if (channel == nullptr) {
4426 LOGE("%s: channel is null", __FUNCTION__);
4427 return BAD_VALUE;
4428 }
4429
4430 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4431 if (stream == nullptr) {
4432 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4433 return NAME_NOT_FOUND;
4434 }
4435
4436 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4437 if (streamInfo == nullptr) {
4438 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4439 return NAME_NOT_FOUND;
4440 }
4441
4442 config->id = pbStreamId;
4443 config->image.width = streamInfo->dim.width;
4444 config->image.height = streamInfo->dim.height;
4445 config->image.padding = 0;
4446 config->image.format = pbStreamFormat;
4447
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004448 uint32_t totalPlaneSize = 0;
4449
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004450 // Fill plane information.
4451 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4452 pbcamera::PlaneConfiguration plane;
4453 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4454 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4455 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004456
4457 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004458 }
4459
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004460 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004461 return OK;
4462}
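/* Note: the padding computed above is simply the backend frame length minus
 * the sum of the per-plane sizes, i.e.
 *     padding = frame_len - sum_i(stride_i * scanline_i)
 * so a tightly packed buffer ends up with padding == 0. */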
4463
Thierry Strudel3d639192016-09-09 11:52:26 -07004464/*===========================================================================
4465 * FUNCTION : processCaptureRequest
4466 *
4467 * DESCRIPTION: process a capture request from camera service
4468 *
4469 * PARAMETERS :
4470 * @request : request from framework to process
4471 *
4472 * RETURN :
4473 *
4474 *==========================================================================*/
4475int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004476 camera3_capture_request_t *request,
4477 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004478{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004479 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004480 int rc = NO_ERROR;
4481 int32_t request_id;
4482 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004483 bool isVidBufRequested = false;
4484 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004485 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004486
4487 pthread_mutex_lock(&mMutex);
4488
4489 // Validate current state
4490 switch (mState) {
4491 case CONFIGURED:
4492 case STARTED:
4493 /* valid state */
4494 break;
4495
4496 case ERROR:
4497 pthread_mutex_unlock(&mMutex);
4498 handleCameraDeviceError();
4499 return -ENODEV;
4500
4501 default:
4502 LOGE("Invalid state %d", mState);
4503 pthread_mutex_unlock(&mMutex);
4504 return -ENODEV;
4505 }
4506
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004507 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004508 if (rc != NO_ERROR) {
4509 LOGE("incoming request is not valid");
4510 pthread_mutex_unlock(&mMutex);
4511 return rc;
4512 }
4513
4514 meta = request->settings;
4515
4516 // For first capture request, send capture intent, and
4517 // stream on all streams
4518 if (mState == CONFIGURED) {
4519 // send an unconfigure to the backend so that the isp
4520 // resources are deallocated
4521 if (!mFirstConfiguration) {
4522 cam_stream_size_info_t stream_config_info;
4523 int32_t hal_version = CAM_HAL_V3;
4524 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4525 stream_config_info.buffer_info.min_buffers =
4526 MIN_INFLIGHT_REQUESTS;
4527 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004528 m_bIs4KVideo ? 0 :
4529 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004530 clear_metadata_buffer(mParameters);
4531 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4532 CAM_INTF_PARM_HAL_VERSION, hal_version);
4533 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4534 CAM_INTF_META_STREAM_INFO, stream_config_info);
4535 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4536 mParameters);
4537 if (rc < 0) {
4538 LOGE("set_parms for unconfigure failed");
4539 pthread_mutex_unlock(&mMutex);
4540 return rc;
4541 }
4542 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004543 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004544 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004545 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004546 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004547 property_get("persist.camera.is_type", is_type_value, "4");
4548 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4549 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4550 property_get("persist.camera.is_type_preview", is_type_value, "4");
4551 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4552 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004553
4554 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4555 int32_t hal_version = CAM_HAL_V3;
4556 uint8_t captureIntent =
4557 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4558 mCaptureIntent = captureIntent;
4559 clear_metadata_buffer(mParameters);
4560 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4561 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4562 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004563 if (mFirstConfiguration) {
4564 // configure instant AEC
4565 // Instant AEC is a session based parameter and it is needed only
4566 // once per complete session after open camera.
4567 // i.e. This is set only once for the first capture request, after open camera.
4568 setInstantAEC(meta);
4569 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004570 uint8_t fwkVideoStabMode=0;
4571 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4572 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4573 }
4574
4575 // Turn EIS on for video/preview only if the EIS setprop is enabled and the
4576 // first capture request's settings have video stabilization enabled
4577 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4578 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004579 int32_t vsMode;
4580 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4582 rc = BAD_VALUE;
4583 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004584 LOGD("setEis %d", setEis);
4585 bool eis3Supported = false;
4586 size_t count = IS_TYPE_MAX;
4587 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4588 for (size_t i = 0; i < count; i++) {
4589 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4590 eis3Supported = true;
4591 break;
4592 }
4593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004594
4595 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004596 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004597 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4598 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004599 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4600 is_type = isTypePreview;
4601 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4602 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4603 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004604 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004605 } else {
4606 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004607 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004608 } else {
4609 is_type = IS_TYPE_NONE;
4610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004612 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004613 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4614 }
4615 }
4616
4617 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4618 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4619
Thierry Strudel54dc9782017-02-15 12:12:10 -08004620 //Disable tintless only if the property is set to 0
4621 memset(prop, 0, sizeof(prop));
4622 property_get("persist.camera.tintless.enable", prop, "1");
4623 int32_t tintless_value = atoi(prop);
4624
Thierry Strudel3d639192016-09-09 11:52:26 -07004625 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4626 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004627
Thierry Strudel3d639192016-09-09 11:52:26 -07004628 //Disable CDS for HFR mode or if DIS/EIS is on.
4629 //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4630 //after every configure_stream
4631 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4632 (m_bIsVideo)) {
4633 int32_t cds = CAM_CDS_MODE_OFF;
4634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4635 CAM_INTF_PARM_CDS_MODE, cds))
4636 LOGE("Failed to disable CDS for HFR mode");
4637
4638 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004639
4640 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4641 uint8_t* use_av_timer = NULL;
4642
4643 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004644 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004645 use_av_timer = &m_debug_avtimer;
4646 }
4647 else{
4648 use_av_timer =
4649 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004650 if (use_av_timer) {
4651 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4652 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004653 }
4654
4655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4656 rc = BAD_VALUE;
4657 }
4658 }
4659
Thierry Strudel3d639192016-09-09 11:52:26 -07004660 setMobicat();
4661
4662 /* Set fps and hfr mode while sending meta stream info so that sensor
4663 * can configure appropriate streaming mode */
4664 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004665 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4666 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004667 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4668 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004669 if (rc == NO_ERROR) {
4670 int32_t max_fps =
4671 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004672 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004673 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4674 }
4675 /* For HFR, more buffers are dequeued upfront to improve the performance */
4676 if (mBatchSize) {
4677 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4678 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4679 }
4680 }
4681 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004682 LOGE("setHalFpsRange failed");
4683 }
4684 }
4685 if (meta.exists(ANDROID_CONTROL_MODE)) {
4686 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4687 rc = extractSceneMode(meta, metaMode, mParameters);
4688 if (rc != NO_ERROR) {
4689 LOGE("extractSceneMode failed");
4690 }
4691 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004692 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004693
Thierry Strudel04e026f2016-10-10 11:27:36 -07004694 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4695 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4696 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4697 rc = setVideoHdrMode(mParameters, vhdr);
4698 if (rc != NO_ERROR) {
4699 LOGE("setVideoHDR is failed");
4700 }
4701 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004702
Thierry Strudel3d639192016-09-09 11:52:26 -07004703 //TODO: validate the arguments, HSV scenemode should have only the
4704 //advertised fps ranges
4705
4706 /*set the capture intent, hal version, tintless, stream info,
4707 *and DIS enable parameters to the backend*/
4708 LOGD("set_parms META_STREAM_INFO " );
4709 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004710 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4711 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004712 mStreamConfigInfo.type[i],
4713 mStreamConfigInfo.stream_sizes[i].width,
4714 mStreamConfigInfo.stream_sizes[i].height,
4715 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004716 mStreamConfigInfo.format[i],
4717 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004718 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004719
Thierry Strudel3d639192016-09-09 11:52:26 -07004720 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4721 mParameters);
4722 if (rc < 0) {
4723 LOGE("set_parms failed for hal version, stream info");
4724 }
4725
Chien-Yu Chenee335912017-02-09 17:53:20 -08004726 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4727 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 if (rc != NO_ERROR) {
4729 LOGE("Failed to get sensor output size");
4730 pthread_mutex_unlock(&mMutex);
4731 goto error_exit;
4732 }
4733
4734 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4735 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004736 mSensorModeInfo.active_array_size.width,
4737 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004738
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004739 if (gHdrPlusClient != nullptr) {
4740 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4741 if (rc != OK) {
4742 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4743 mCameraId, mSensorModeInfo.op_pixel_clk);
4744 pthread_mutex_unlock(&mMutex);
4745 goto error_exit;
4746 }
4747 }
4748
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 /* Set batchmode before initializing channel. Since registerBuffer
4750 * internally initializes some of the channels, it is better to set batch mode
4751 * even before the first registerBuffer call */
4752 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4753 it != mStreamInfo.end(); it++) {
4754 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4755 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4756 && mBatchSize) {
4757 rc = channel->setBatchSize(mBatchSize);
4758 //Disable per frame map unmap for HFR/batchmode case
4759 rc |= channel->setPerFrameMapUnmap(false);
4760 if (NO_ERROR != rc) {
4761 LOGE("Channel init failed %d", rc);
4762 pthread_mutex_unlock(&mMutex);
4763 goto error_exit;
4764 }
4765 }
4766 }
4767
4768 //First initialize all streams
4769 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4770 it != mStreamInfo.end(); it++) {
4771 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4772 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4773 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004774 setEis) {
4775 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4776 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4777 is_type = mStreamConfigInfo.is_type[i];
4778 break;
4779 }
4780 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004782 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 rc = channel->initialize(IS_TYPE_NONE);
4784 }
4785 if (NO_ERROR != rc) {
4786 LOGE("Channel initialization failed %d", rc);
4787 pthread_mutex_unlock(&mMutex);
4788 goto error_exit;
4789 }
4790 }
4791
4792 if (mRawDumpChannel) {
4793 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4794 if (rc != NO_ERROR) {
4795 LOGE("Error: Raw Dump Channel init failed");
4796 pthread_mutex_unlock(&mMutex);
4797 goto error_exit;
4798 }
4799 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004800 if (mHdrPlusRawSrcChannel) {
4801 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4802 if (rc != NO_ERROR) {
4803 LOGE("Error: HDR+ RAW Source Channel init failed");
4804 pthread_mutex_unlock(&mMutex);
4805 goto error_exit;
4806 }
4807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 if (mSupportChannel) {
4809 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4810 if (rc < 0) {
4811 LOGE("Support channel initialization failed");
4812 pthread_mutex_unlock(&mMutex);
4813 goto error_exit;
4814 }
4815 }
4816 if (mAnalysisChannel) {
4817 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4818 if (rc < 0) {
4819 LOGE("Analysis channel initialization failed");
4820 pthread_mutex_unlock(&mMutex);
4821 goto error_exit;
4822 }
4823 }
4824 if (mDummyBatchChannel) {
4825 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4826 if (rc < 0) {
4827 LOGE("mDummyBatchChannel setBatchSize failed");
4828 pthread_mutex_unlock(&mMutex);
4829 goto error_exit;
4830 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004831 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 if (rc < 0) {
4833 LOGE("mDummyBatchChannel initialization failed");
4834 pthread_mutex_unlock(&mMutex);
4835 goto error_exit;
4836 }
4837 }
4838
4839 // Set bundle info
4840 rc = setBundleInfo();
4841 if (rc < 0) {
4842 LOGE("setBundleInfo failed %d", rc);
4843 pthread_mutex_unlock(&mMutex);
4844 goto error_exit;
4845 }
4846
4847 //update settings from app here
4848 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4849 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4850 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4851 }
4852 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4853 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4854 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4855 }
4856 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4857 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4858 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4859
4860 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4861 (mLinkedCameraId != mCameraId) ) {
4862 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4863 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 goto error_exit;
4866 }
4867 }
4868
4869 // add bundle related cameras
4870 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4871 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004872 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4873 &m_pDualCamCmdPtr->bundle_info;
4874 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 if (mIsDeviceLinked)
4876 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4877 else
4878 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4879
4880 pthread_mutex_lock(&gCamLock);
4881
4882 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4883 LOGE("Dualcam: Invalid Session Id ");
4884 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004885 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004886 goto error_exit;
4887 }
4888
4889 if (mIsMainCamera == 1) {
4890 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4891 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004892 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004893 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 // related session id should be session id of linked session
4895 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4896 } else {
4897 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4898 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004899 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004900 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004901 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4902 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004903 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 pthread_mutex_unlock(&gCamLock);
4905
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004906 rc = mCameraHandle->ops->set_dual_cam_cmd(
4907 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004908 if (rc < 0) {
4909 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004910 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 goto error_exit;
4912 }
4913 }
4914
4915 //Then start them.
4916 LOGH("Start META Channel");
4917 rc = mMetadataChannel->start();
4918 if (rc < 0) {
4919 LOGE("META channel start failed");
4920 pthread_mutex_unlock(&mMutex);
4921 goto error_exit;
4922 }
4923
4924 if (mAnalysisChannel) {
4925 rc = mAnalysisChannel->start();
4926 if (rc < 0) {
4927 LOGE("Analysis channel start failed");
4928 mMetadataChannel->stop();
4929 pthread_mutex_unlock(&mMutex);
4930 goto error_exit;
4931 }
4932 }
4933
4934 if (mSupportChannel) {
4935 rc = mSupportChannel->start();
4936 if (rc < 0) {
4937 LOGE("Support channel start failed");
4938 mMetadataChannel->stop();
4939 /* Although support and analysis are mutually exclusive today
4940 adding it in any case for future proofing */
4941 if (mAnalysisChannel) {
4942 mAnalysisChannel->stop();
4943 }
4944 pthread_mutex_unlock(&mMutex);
4945 goto error_exit;
4946 }
4947 }
4948 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4949 it != mStreamInfo.end(); it++) {
4950 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4951 LOGH("Start Processing Channel mask=%d",
4952 channel->getStreamTypeMask());
4953 rc = channel->start();
4954 if (rc < 0) {
4955 LOGE("channel start failed");
4956 pthread_mutex_unlock(&mMutex);
4957 goto error_exit;
4958 }
4959 }
4960
4961 if (mRawDumpChannel) {
4962 LOGD("Starting raw dump stream");
4963 rc = mRawDumpChannel->start();
4964 if (rc != NO_ERROR) {
4965 LOGE("Error Starting Raw Dump Channel");
4966 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4967 it != mStreamInfo.end(); it++) {
4968 QCamera3Channel *channel =
4969 (QCamera3Channel *)(*it)->stream->priv;
4970 LOGH("Stopping Processing Channel mask=%d",
4971 channel->getStreamTypeMask());
4972 channel->stop();
4973 }
4974 if (mSupportChannel)
4975 mSupportChannel->stop();
4976 if (mAnalysisChannel) {
4977 mAnalysisChannel->stop();
4978 }
4979 mMetadataChannel->stop();
4980 pthread_mutex_unlock(&mMutex);
4981 goto error_exit;
4982 }
4983 }
4984
4985 if (mChannelHandle) {
4986
4987 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4988 mChannelHandle);
4989 if (rc != NO_ERROR) {
4990 LOGE("start_channel failed %d", rc);
4991 pthread_mutex_unlock(&mMutex);
4992 goto error_exit;
4993 }
4994 }
4995
4996 goto no_error;
4997error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004998 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004999 return rc;
5000no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 mWokenUpByDaemon = false;
5002 mPendingLiveRequest = 0;
5003 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 }
5005
Chien-Yu Chenee335912017-02-09 17:53:20 -08005006 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005007 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08005008 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5009 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5010 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5011 rc = enableHdrPlusModeLocked();
5012 if (rc != OK) {
5013 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5014 pthread_mutex_unlock(&mMutex);
5015 return rc;
5016 }
5017
5018 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5019 if (mHdrPlusRawSrcChannel) {
5020 rc = mHdrPlusRawSrcChannel->start();
5021 if (rc != OK) {
5022 LOGE("Error Starting HDR+ RAW Channel");
5023 pthread_mutex_unlock(&mMutex);
5024 return rc;
5025 }
5026 }
5027 mFirstPreviewIntentSeen = true;
5028 }
5029
Thierry Strudel3d639192016-09-09 11:52:26 -07005030 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005031 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005032
5033 if (mFlushPerf) {
5034 //we cannot accept any requests during flush
5035 LOGE("process_capture_request cannot proceed during flush");
5036 pthread_mutex_unlock(&mMutex);
5037 return NO_ERROR; //should return an error
5038 }
5039
5040 if (meta.exists(ANDROID_REQUEST_ID)) {
5041 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5042 mCurrentRequestId = request_id;
5043 LOGD("Received request with id: %d", request_id);
5044 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5045 LOGE("Unable to find request id field, \
5046 & no previous id available");
5047 pthread_mutex_unlock(&mMutex);
5048 return NAME_NOT_FOUND;
5049 } else {
5050 LOGD("Re-using old request id");
5051 request_id = mCurrentRequestId;
5052 }
5053
5054 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5055 request->num_output_buffers,
5056 request->input_buffer,
5057 frameNumber);
5058 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005059 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005061 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 uint32_t snapshotStreamId = 0;
5063 for (size_t i = 0; i < request->num_output_buffers; i++) {
5064 const camera3_stream_buffer_t& output = request->output_buffers[i];
5065 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5066
Emilian Peev7650c122017-01-19 08:24:33 -08005067 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5068 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005069 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005070 blob_request = 1;
5071 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5072 }
5073
5074 if (output.acquire_fence != -1) {
5075 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5076 close(output.acquire_fence);
5077 if (rc != OK) {
5078 LOGE("sync wait failed %d", rc);
5079 pthread_mutex_unlock(&mMutex);
5080 return rc;
5081 }
5082 }
5083
Emilian Peev7650c122017-01-19 08:24:33 -08005084 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5085 depthRequestPresent = true;
5086 continue;
5087 }
5088
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005089 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005091
5092 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5093 isVidBufRequested = true;
5094 }
5095 }
5096
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005097 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5098 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5099 itr++) {
5100 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5101 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5102 channel->getStreamID(channel->getStreamTypeMask());
5103
5104 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5105 isVidBufRequested = true;
5106 }
5107 }
5108
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005110 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005111 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 }
5113 if (blob_request && mRawDumpChannel) {
5114 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005115 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005116 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005117 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
5119
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005120 {
5121 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5122 // Request a RAW buffer if
5123 // 1. mHdrPlusRawSrcChannel is valid.
5124 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5125 // 3. There is no pending HDR+ request.
5126 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5127 mHdrPlusPendingRequests.size() == 0) {
5128 streamsArray.stream_request[streamsArray.num_streams].streamID =
5129 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5130 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5131 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005132 }
5133
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005134 //extract capture intent
5135 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5136 mCaptureIntent =
5137 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5138 }
5139
5140 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5141 mCacMode =
5142 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5143 }
5144
5145 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005146 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005147
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005148 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005149 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005150 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5151 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005152 }
5153
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005154 if (hdrPlusRequest) {
5155 // For a HDR+ request, just set the frame parameters.
5156 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5157 if (rc < 0) {
5158 LOGE("fail to set frame parameters");
5159 pthread_mutex_unlock(&mMutex);
5160 return rc;
5161 }
5162 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 /* Parse the settings:
5164 * - For every request in NORMAL MODE
5165 * - For every request in HFR mode during preview only case
5166 * - For first request of every batch in HFR mode during video
5167 * recording. In batchmode the same settings except frame number is
5168 * repeated in each request of the batch.
5169 */
5170 if (!mBatchSize ||
5171 (mBatchSize && !isVidBufRequested) ||
5172 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005173 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (rc < 0) {
5175 LOGE("fail to set frame parameters");
5176 pthread_mutex_unlock(&mMutex);
5177 return rc;
5178 }
5179 }
5180 /* For batchMode HFR, setFrameParameters is not called for every
5181 * request; only the frame number of the latest request is parsed.
5182 * Keep track of the first and last frame numbers in a batch so that
5183 * metadata for all frame numbers of the batch can be duplicated in
5184 * handleBatchMetadata */
5185 if (mBatchSize) {
5186 if (!mToBeQueuedVidBufs) {
5187 //start of the batch
5188 mFirstFrameNumberInBatch = request->frame_number;
5189 }
5190 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5191 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5192 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005193 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005194 return BAD_VALUE;
5195 }
5196 }
5197 if (mNeedSensorRestart) {
5198 /* Unlock the mutex as restartSensor waits on the channels to be
5199 * stopped, which in turn calls stream callback functions -
5200 * handleBufferWithLock and handleMetadataWithLock */
5201 pthread_mutex_unlock(&mMutex);
5202 rc = dynamicUpdateMetaStreamInfo();
5203 if (rc != NO_ERROR) {
5204 LOGE("Restarting the sensor failed");
5205 return BAD_VALUE;
5206 }
5207 mNeedSensorRestart = false;
5208 pthread_mutex_lock(&mMutex);
5209 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005210 if(mResetInstantAEC) {
5211 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5212 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5213 mResetInstantAEC = false;
5214 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005215 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 if (request->input_buffer->acquire_fence != -1) {
5217 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5218 close(request->input_buffer->acquire_fence);
5219 if (rc != OK) {
5220 LOGE("input buffer sync wait failed %d", rc);
5221 pthread_mutex_unlock(&mMutex);
5222 return rc;
5223 }
5224 }
5225 }
5226
5227 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5228 mLastCustIntentFrmNum = frameNumber;
5229 }
5230 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005231 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 pendingRequestIterator latestRequest;
5233 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005234 pendingRequest.num_buffers = depthRequestPresent ?
5235 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 pendingRequest.request_id = request_id;
5237 pendingRequest.blob_request = blob_request;
5238 pendingRequest.timestamp = 0;
5239 pendingRequest.bUrgentReceived = 0;
5240 if (request->input_buffer) {
5241 pendingRequest.input_buffer =
5242 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5243 *(pendingRequest.input_buffer) = *(request->input_buffer);
5244 pInputBuffer = pendingRequest.input_buffer;
5245 } else {
5246 pendingRequest.input_buffer = NULL;
5247 pInputBuffer = NULL;
5248 }
5249
5250 pendingRequest.pipeline_depth = 0;
5251 pendingRequest.partial_result_cnt = 0;
5252 extractJpegMetadata(mCurJpegMeta, request);
5253 pendingRequest.jpegMetadata = mCurJpegMeta;
5254 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5255 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005257 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5258 mHybridAeEnable =
5259 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5260 }
5261 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005262 /* DevCamDebug metadata processCaptureRequest */
5263 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5264 mDevCamDebugMetaEnable =
5265 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5266 }
5267 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5268 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005269
5270 //extract CAC info
5271 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5272 mCacMode =
5273 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5274 }
5275 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005276 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005277
5278 PendingBuffersInRequest bufsForCurRequest;
5279 bufsForCurRequest.frame_number = frameNumber;
5280 // Mark current timestamp for the new request
5281 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005282 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005283
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005284 if (hdrPlusRequest) {
5285 // Save settings for this request.
5286 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5287 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5288
5289 // Add to pending HDR+ request queue.
5290 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5291 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5292
5293 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5294 }
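    // Note: for an HDR+ request the HAL only records the settings and queues the
    // request for the HDR+ client here; the regular channel request() calls further
    // below are skipped (they are guarded by !hdrPlusRequest).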
5295
Thierry Strudel3d639192016-09-09 11:52:26 -07005296 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005297 if (request->output_buffers[i].stream->data_space ==
5298 HAL_DATASPACE_DEPTH) {
5299 continue;
5300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 RequestedBufferInfo requestedBuf;
5302 memset(&requestedBuf, 0, sizeof(requestedBuf));
5303 requestedBuf.stream = request->output_buffers[i].stream;
5304 requestedBuf.buffer = NULL;
5305 pendingRequest.buffers.push_back(requestedBuf);
5306
5307 // Add the buffer handle to the pending buffers list
5308 PendingBufferInfo bufferInfo;
5309 bufferInfo.buffer = request->output_buffers[i].buffer;
5310 bufferInfo.stream = request->output_buffers[i].stream;
5311 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5312 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5313 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5314 frameNumber, bufferInfo.buffer,
5315 channel->getStreamTypeMask(), bufferInfo.stream->format);
5316 }
5317 // Add this request packet into mPendingBuffersMap
5318 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5319 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5320 mPendingBuffersMap.get_num_overall_buffers());
5321
5322 latestRequest = mPendingRequestsList.insert(
5323 mPendingRequestsList.end(), pendingRequest);
5324 if(mFlush) {
5325 LOGI("mFlush is true");
5326 pthread_mutex_unlock(&mMutex);
5327 return NO_ERROR;
5328 }
5329
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5331 // channel.
5332 if (!hdrPlusRequest) {
5333 int indexUsed;
5334 // Notify metadata channel we receive a request
5335 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005336
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005337 if(request->input_buffer != NULL){
5338 LOGD("Input request, frame_number %d", frameNumber);
5339 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5340 if (NO_ERROR != rc) {
5341 LOGE("fail to set reproc parameters");
5342 pthread_mutex_unlock(&mMutex);
5343 return rc;
5344 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005345 }
5346
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005347 // Call request on other streams
5348 uint32_t streams_need_metadata = 0;
5349 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
5354 if (channel == NULL) {
5355 LOGW("invalid channel pointer for stream");
5356 continue;
5357 }
5358
5359 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5360 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5361 output.buffer, request->input_buffer, frameNumber);
5362 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005363 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005364 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5365 if (rc < 0) {
5366 LOGE("Fail to request on picture channel");
5367 pthread_mutex_unlock(&mMutex);
5368 return rc;
5369 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005371 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5372 assert(NULL != mDepthChannel);
5373 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005374
Emilian Peev7650c122017-01-19 08:24:33 -08005375 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5376 if (rc < 0) {
5377 LOGE("Fail to map on depth buffer");
5378 pthread_mutex_unlock(&mMutex);
5379 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005380 }
Emilian Peev7650c122017-01-19 08:24:33 -08005381 } else {
5382 LOGD("snapshot request with buffer %p, frame_number %d",
5383 output.buffer, frameNumber);
5384 if (!request->settings) {
5385 rc = channel->request(output.buffer, frameNumber,
5386 NULL, mPrevParameters, indexUsed);
5387 } else {
5388 rc = channel->request(output.buffer, frameNumber,
5389 NULL, mParameters, indexUsed);
5390 }
5391 if (rc < 0) {
5392 LOGE("Fail to request on picture channel");
5393 pthread_mutex_unlock(&mMutex);
5394 return rc;
5395 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005396
Emilian Peev7650c122017-01-19 08:24:33 -08005397 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5398 uint32_t j = 0;
5399 for (j = 0; j < streamsArray.num_streams; j++) {
5400 if (streamsArray.stream_request[j].streamID == streamId) {
5401 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5402 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5403 else
5404 streamsArray.stream_request[j].buf_index = indexUsed;
5405 break;
5406 }
5407 }
5408 if (j == streamsArray.num_streams) {
5409 LOGE("Did not find matching stream to update index");
5410 assert(0);
5411 }
5412
5413 pendingBufferIter->need_metadata = true;
5414 streams_need_metadata++;
5415 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005416 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005417 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5418 bool needMetadata = false;
5419 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5420 rc = yuvChannel->request(output.buffer, frameNumber,
5421 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5422 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005424 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005428
5429 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5430 uint32_t j = 0;
5431 for (j = 0; j < streamsArray.num_streams; j++) {
5432 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005433 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5434 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5435 else
5436 streamsArray.stream_request[j].buf_index = indexUsed;
5437 break;
5438 }
5439 }
5440 if (j == streamsArray.num_streams) {
5441 LOGE("Did not find matching stream to update index");
5442 assert(0);
5443 }
5444
5445 pendingBufferIter->need_metadata = needMetadata;
5446 if (needMetadata)
5447 streams_need_metadata += 1;
5448 LOGD("calling YUV channel request, need_metadata is %d",
5449 needMetadata);
5450 } else {
5451 LOGD("request with buffer %p, frame_number %d",
5452 output.buffer, frameNumber);
5453
5454 rc = channel->request(output.buffer, frameNumber, indexUsed);
5455
5456 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5457 uint32_t j = 0;
5458 for (j = 0; j < streamsArray.num_streams; j++) {
5459 if (streamsArray.stream_request[j].streamID == streamId) {
5460 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5461 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5462 else
5463 streamsArray.stream_request[j].buf_index = indexUsed;
5464 break;
5465 }
5466 }
5467 if (j == streamsArray.num_streams) {
5468 LOGE("Did not find matching stream to update index");
5469 assert(0);
5470 }
5471
5472 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5473 && mBatchSize) {
5474 mToBeQueuedVidBufs++;
5475 if (mToBeQueuedVidBufs == mBatchSize) {
5476 channel->queueBatchBuf();
5477 }
5478 }
5479 if (rc < 0) {
5480 LOGE("request failed");
5481 pthread_mutex_unlock(&mMutex);
5482 return rc;
5483 }
5484 }
5485 pendingBufferIter++;
5486 }
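        /* Note (illustrative refactoring sketch, not part of this HAL): the
         * streamID / buf_index bookkeeping above is repeated for the BLOB, YUV
         * and default stream paths. A hypothetical helper capturing the shared
         * pattern, using the existing cam_stream_ID_t type, could look like:
         *
         *   static void updateStreamBufIndex(cam_stream_ID_t &streams,
         *           uint32_t streamId, uint32_t indexUsed, uint32_t opMode) {
         *       for (uint32_t j = 0; j < streams.num_streams; j++) {
         *           if (streams.stream_request[j].streamID == streamId) {
         *               streams.stream_request[j].buf_index =
         *                       (opMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) ?
         *                       CAM_FREERUN_IDX : indexUsed;
         *               return;
         *           }
         *       }
         *       LOGE("Did not find matching stream to update index");
         *   }
         */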
5487
5488 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5489 itr++) {
5490 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5491
5492 if (channel == NULL) {
5493 LOGE("invalid channel pointer for stream");
5494 assert(0);
5495 return BAD_VALUE;
5496 }
5497
5498 InternalRequest requestedStream;
5499 requestedStream = (*itr);
5500
5501
5502 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5503 LOGD("snapshot request internally input buffer %p, frame_number %d",
5504 request->input_buffer, frameNumber);
5505 if(request->input_buffer != NULL){
5506 rc = channel->request(NULL, frameNumber,
5507 pInputBuffer, &mReprocMeta, indexUsed, true,
5508 requestedStream.meteringOnly);
5509 if (rc < 0) {
5510 LOGE("Fail to request on picture channel");
5511 pthread_mutex_unlock(&mMutex);
5512 return rc;
5513 }
5514 } else {
5515 LOGD("snapshot request with frame_number %d", frameNumber);
5516 if (!request->settings) {
5517 rc = channel->request(NULL, frameNumber,
5518 NULL, mPrevParameters, indexUsed, true,
5519 requestedStream.meteringOnly);
5520 } else {
5521 rc = channel->request(NULL, frameNumber,
5522 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5523 }
5524 if (rc < 0) {
5525 LOGE("Fail to request on picture channel");
5526 pthread_mutex_unlock(&mMutex);
5527 return rc;
5528 }
5529
5530 if ((*itr).meteringOnly != 1) {
5531 requestedStream.need_metadata = 1;
5532 streams_need_metadata++;
5533 }
5534 }
5535
5536 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5537 uint32_t j = 0;
5538 for (j = 0; j < streamsArray.num_streams; j++) {
5539 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005540 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5541 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5542 else
5543 streamsArray.stream_request[j].buf_index = indexUsed;
5544 break;
5545 }
5546 }
5547 if (j == streamsArray.num_streams) {
5548 LOGE("Did not find matching stream to update index");
5549 assert(0);
5550 }
5551
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005552 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005554 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005556 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005558 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005559
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005560        //If two streams have need_metadata set to true, fail the request unless
5561        //we copy or reference-count the metadata buffer
5562        if (streams_need_metadata > 1) {
5563            LOGE("not supporting requests in which two streams require"
5564                    " two HAL metadata buffers for reprocessing");
5565 pthread_mutex_unlock(&mMutex);
5566 return -EINVAL;
5567 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005568
Emilian Peev7650c122017-01-19 08:24:33 -08005569 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5571 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5572 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5573 pthread_mutex_unlock(&mMutex);
5574 return BAD_VALUE;
5575 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005576 if (request->input_buffer == NULL) {
5577 /* Set the parameters to backend:
5578 * - For every request in NORMAL MODE
5579 * - For every request in HFR mode during preview only case
5580 * - Once every batch in HFR mode during video recording
5581 */
5582 if (!mBatchSize ||
5583 (mBatchSize && !isVidBufRequested) ||
5584 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5585 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5586 mBatchSize, isVidBufRequested,
5587 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005588
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5590 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5591 uint32_t m = 0;
5592 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5593 if (streamsArray.stream_request[k].streamID ==
5594 mBatchedStreamsArray.stream_request[m].streamID)
5595 break;
5596 }
5597 if (m == mBatchedStreamsArray.num_streams) {
5598                        mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5599                                streamID = streamsArray.stream_request[k].streamID;
5600                        mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5601                                buf_index = streamsArray.stream_request[k].buf_index;
5602                        mBatchedStreamsArray.num_streams =
5603                                mBatchedStreamsArray.num_streams + 1;
5606 }
5607 }
5608 streamsArray = mBatchedStreamsArray;
5609 }
5610 /* Update stream id of all the requested buffers */
5611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5612 streamsArray)) {
5613 LOGE("Failed to set stream type mask in the parameters");
5614 return BAD_VALUE;
5615 }
5616
5617 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5618 mParameters);
5619 if (rc < 0) {
5620 LOGE("set_parms failed");
5621 }
5622                /* reset to zero because the batch is queued */
5623 mToBeQueuedVidBufs = 0;
5624 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5625 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5626 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005627 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5628 uint32_t m = 0;
5629 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5630 if (streamsArray.stream_request[k].streamID ==
5631 mBatchedStreamsArray.stream_request[m].streamID)
5632 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005633 }
5634 if (m == mBatchedStreamsArray.num_streams) {
5635 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5636 streamID = streamsArray.stream_request[k].streamID;
5637 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5638 buf_index = streamsArray.stream_request[k].buf_index;
5639 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5640 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005641 }
5642 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 }
5646
5647 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5648
5649 mState = STARTED;
5650 // Added a timed condition wait
5651 struct timespec ts;
5652 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005653 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005654 if (rc < 0) {
5655 isValidTimeout = 0;
5656 LOGE("Error reading the real time clock!!");
5657 }
5658 else {
5659        // Set a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005660 int64_t timeout = 5;
5661 {
5662 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5663 // If there is a pending HDR+ request, the following requests may be blocked until the
5664 // HDR+ request is done. So allow a longer timeout.
5665 if (mHdrPlusPendingRequests.size() > 0) {
5666 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5667 }
5668 }
5669 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005670 }
5671 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005672 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005673 (mState != ERROR) && (mState != DEINIT)) {
5674 if (!isValidTimeout) {
5675 LOGD("Blocking on conditional wait");
5676 pthread_cond_wait(&mRequestCond, &mMutex);
5677 }
5678 else {
5679 LOGD("Blocking on timed conditional wait");
5680 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5681 if (rc == ETIMEDOUT) {
5682 rc = -ENODEV;
5683 LOGE("Unblocked on timeout!!!!");
5684 break;
5685 }
5686 }
5687 LOGD("Unblocked");
5688 if (mWokenUpByDaemon) {
5689 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005690 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005691 break;
5692 }
5693 }
5694 pthread_mutex_unlock(&mMutex);
5695
5696 return rc;
5697}
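/* Illustrative sketch (not part of the HAL build): the request throttling above
 * blocks on mRequestCond with an absolute deadline derived from CLOCK_MONOTONIC,
 * assuming the condition variable is configured to use the monotonic clock. A
 * minimal standalone version of that pattern, with hypothetical names, is:
 *
 *   struct timespec deadline;
 *   bool timedWaitValid = (clock_gettime(CLOCK_MONOTONIC, &deadline) == 0);
 *   if (timedWaitValid) {
 *       deadline.tv_sec += 5;  // e.g. a 5 second budget for the request
 *   }
 *   while (stillThrottled()) {           // hypothetical predicate
 *       int err = timedWaitValid ?
 *               pthread_cond_timedwait(&cond, &mutex, &deadline) :
 *               pthread_cond_wait(&cond, &mutex);
 *       if (err == ETIMEDOUT) {
 *           // give up and report an error, as processCaptureRequest() does
 *           break;
 *       }
 *   }
 */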
5698
5699/*===========================================================================
5700 * FUNCTION : dump
5701 *
5702 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and
     *              pending frame drops) to the given file descriptor
5703 *
5704 * PARAMETERS :
5705 *   @fd : file descriptor to write the dump information to
5706 *
5707 * RETURN     : None
5708 *==========================================================================*/
5709void QCamera3HardwareInterface::dump(int fd)
5710{
5711 pthread_mutex_lock(&mMutex);
5712 dprintf(fd, "\n Camera HAL3 information Begin \n");
5713
5714 dprintf(fd, "\nNumber of pending requests: %zu \n",
5715 mPendingRequestsList.size());
5716 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5717 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5718 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5719 for(pendingRequestIterator i = mPendingRequestsList.begin();
5720 i != mPendingRequestsList.end(); i++) {
5721 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5722 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5723 i->input_buffer);
5724 }
5725 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5726 mPendingBuffersMap.get_num_overall_buffers());
5727 dprintf(fd, "-------+------------------\n");
5728 dprintf(fd, " Frame | Stream type mask \n");
5729 dprintf(fd, "-------+------------------\n");
5730 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5731 for(auto &j : req.mPendingBufferList) {
5732 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5733 dprintf(fd, " %5d | %11d \n",
5734 req.frame_number, channel->getStreamTypeMask());
5735 }
5736 }
5737 dprintf(fd, "-------+------------------\n");
5738
5739 dprintf(fd, "\nPending frame drop list: %zu\n",
5740 mPendingFrameDropList.size());
5741 dprintf(fd, "-------+-----------\n");
5742 dprintf(fd, " Frame | Stream ID \n");
5743 dprintf(fd, "-------+-----------\n");
5744 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5745 i != mPendingFrameDropList.end(); i++) {
5746 dprintf(fd, " %5d | %9d \n",
5747 i->frame_number, i->stream_ID);
5748 }
5749 dprintf(fd, "-------+-----------\n");
5750
5751 dprintf(fd, "\n Camera HAL3 information End \n");
5752
5753 /* use dumpsys media.camera as trigger to send update debug level event */
5754 mUpdateDebugLevel = true;
5755 pthread_mutex_unlock(&mMutex);
5756 return;
5757}
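/* Note: dump() is reached through the camera3_device_ops dump hook, typically
 * triggered by "adb shell dumpsys media.camera". An illustrative (hypothetical)
 * forwarding wrapper, assuming the device's priv field stores the HWI instance,
 * would look like:
 *
 *   void dump(const struct camera3_device *device, int fd) {
 *       QCamera3HardwareInterface *hw =
 *               reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
 *       if (hw != NULL) {
 *           hw->dump(fd);
 *       }
 *   }
 */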
5758
5759/*===========================================================================
5760 * FUNCTION : flush
5761 *
5762 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5763 * conditionally restarts channels
5764 *
5765 * PARAMETERS :
5766 * @ restartChannels: re-start all channels
5767 *
5768 *
5769 * RETURN :
5770 * 0 on success
5771 * Error code on failure
5772 *==========================================================================*/
5773int QCamera3HardwareInterface::flush(bool restartChannels)
5774{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005775 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005776 int32_t rc = NO_ERROR;
5777
5778 LOGD("Unblocking Process Capture Request");
5779 pthread_mutex_lock(&mMutex);
5780 mFlush = true;
5781 pthread_mutex_unlock(&mMutex);
5782
5783 rc = stopAllChannels();
5784 // unlink of dualcam
5785 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005786 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5787 &m_pDualCamCmdPtr->bundle_info;
5788 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005789 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5790 pthread_mutex_lock(&gCamLock);
5791
5792 if (mIsMainCamera == 1) {
5793 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5794 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005795 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005796 // related session id should be session id of linked session
5797 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5798 } else {
5799 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5800 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005801 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005802 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5803 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005804 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005805 pthread_mutex_unlock(&gCamLock);
5806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005807 rc = mCameraHandle->ops->set_dual_cam_cmd(
5808 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005809 if (rc < 0) {
5810 LOGE("Dualcam: Unlink failed, but still proceed to close");
5811 }
5812 }
5813
5814 if (rc < 0) {
5815 LOGE("stopAllChannels failed");
5816 return rc;
5817 }
5818 if (mChannelHandle) {
5819 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5820 mChannelHandle);
5821 }
5822
5823 // Reset bundle info
5824 rc = setBundleInfo();
5825 if (rc < 0) {
5826 LOGE("setBundleInfo failed %d", rc);
5827 return rc;
5828 }
5829
5830 // Mutex Lock
5831 pthread_mutex_lock(&mMutex);
5832
5833 // Unblock process_capture_request
5834 mPendingLiveRequest = 0;
5835 pthread_cond_signal(&mRequestCond);
5836
5837 rc = notifyErrorForPendingRequests();
5838 if (rc < 0) {
5839 LOGE("notifyErrorForPendingRequests failed");
5840 pthread_mutex_unlock(&mMutex);
5841 return rc;
5842 }
5843
5844 mFlush = false;
5845
5846 // Start the Streams/Channels
5847 if (restartChannels) {
5848 rc = startAllChannels();
5849 if (rc < 0) {
5850 LOGE("startAllChannels failed");
5851 pthread_mutex_unlock(&mMutex);
5852 return rc;
5853 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005854 if (mChannelHandle) {
5855 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5856 mChannelHandle);
5857 if (rc < 0) {
5858 LOGE("start_channel failed");
5859 pthread_mutex_unlock(&mMutex);
5860 return rc;
5861 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005862 }
5863 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005864 pthread_mutex_unlock(&mMutex);
5865
5866 return 0;
5867}
5868
5869/*===========================================================================
5870 * FUNCTION : flushPerf
5871 *
5872 * DESCRIPTION: This is the performance optimization version of flush that does
5873 * not use stream off, rather flushes the system
5874 *
5875 * PARAMETERS :
5876 *
5877 *
5878 * RETURN : 0 : success
5879 * -EINVAL: input is malformed (device is not valid)
5880 * -ENODEV: if the device has encountered a serious error
5881 *==========================================================================*/
5882int QCamera3HardwareInterface::flushPerf()
5883{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005884 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005885 int32_t rc = 0;
5886 struct timespec timeout;
5887 bool timed_wait = false;
5888
5889 pthread_mutex_lock(&mMutex);
5890 mFlushPerf = true;
5891 mPendingBuffersMap.numPendingBufsAtFlush =
5892 mPendingBuffersMap.get_num_overall_buffers();
5893 LOGD("Calling flush. Wait for %d buffers to return",
5894 mPendingBuffersMap.numPendingBufsAtFlush);
5895
5896 /* send the flush event to the backend */
5897 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5898 if (rc < 0) {
5899 LOGE("Error in flush: IOCTL failure");
5900 mFlushPerf = false;
5901 pthread_mutex_unlock(&mMutex);
5902 return -ENODEV;
5903 }
5904
5905 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5906 LOGD("No pending buffers in HAL, return flush");
5907 mFlushPerf = false;
5908 pthread_mutex_unlock(&mMutex);
5909 return rc;
5910 }
5911
5912 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005913 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005914 if (rc < 0) {
5915 LOGE("Error reading the real time clock, cannot use timed wait");
5916 } else {
5917 timeout.tv_sec += FLUSH_TIMEOUT;
5918 timed_wait = true;
5919 }
5920
5921 //Block on conditional variable
5922 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5923 LOGD("Waiting on mBuffersCond");
5924 if (!timed_wait) {
5925 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5926 if (rc != 0) {
5927 LOGE("pthread_cond_wait failed due to rc = %s",
5928 strerror(rc));
5929 break;
5930 }
5931 } else {
5932 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5933 if (rc != 0) {
5934 LOGE("pthread_cond_timedwait failed due to rc = %s",
5935 strerror(rc));
5936 break;
5937 }
5938 }
5939 }
5940 if (rc != 0) {
5941 mFlushPerf = false;
5942 pthread_mutex_unlock(&mMutex);
5943 return -ENODEV;
5944 }
5945
5946 LOGD("Received buffers, now safe to return them");
5947
5948 //make sure the channels handle flush
5949 //currently only required for the picture channel to release snapshot resources
5950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5951 it != mStreamInfo.end(); it++) {
5952 QCamera3Channel *channel = (*it)->channel;
5953 if (channel) {
5954 rc = channel->flush();
5955 if (rc) {
5956 LOGE("Flushing the channels failed with error %d", rc);
5957                // Even though the channel flush failed, we need to continue and
5958                // return the buffers we have to the framework; the overall return
5959                // value, however, will be an error
5960 rc = -ENODEV;
5961 }
5962 }
5963 }
5964
5965 /* notify the frameworks and send errored results */
5966 rc = notifyErrorForPendingRequests();
5967 if (rc < 0) {
5968 LOGE("notifyErrorForPendingRequests failed");
5969 pthread_mutex_unlock(&mMutex);
5970 return rc;
5971 }
5972
5973 //unblock process_capture_request
5974 mPendingLiveRequest = 0;
5975 unblockRequestIfNecessary();
5976
5977 mFlushPerf = false;
5978 pthread_mutex_unlock(&mMutex);
5979 LOGD ("Flush Operation complete. rc = %d", rc);
5980 return rc;
5981}
5982
5983/*===========================================================================
5984 * FUNCTION : handleCameraDeviceError
5985 *
5986 * DESCRIPTION: This function calls internal flush and notifies the error to
5987 * framework and updates the state variable.
5988 *
5989 * PARAMETERS : None
5990 *
5991 * RETURN : NO_ERROR on Success
5992 * Error code on failure
5993 *==========================================================================*/
5994int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5995{
5996 int32_t rc = NO_ERROR;
5997
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005998 {
5999 Mutex::Autolock lock(mFlushLock);
6000 pthread_mutex_lock(&mMutex);
6001 if (mState != ERROR) {
6002 //if mState != ERROR, nothing to be done
6003 pthread_mutex_unlock(&mMutex);
6004 return NO_ERROR;
6005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006006 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006007
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006008 rc = flush(false /* restart channels */);
6009 if (NO_ERROR != rc) {
6010 LOGE("internal flush to handle mState = ERROR failed");
6011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006012
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006013 pthread_mutex_lock(&mMutex);
6014 mState = DEINIT;
6015 pthread_mutex_unlock(&mMutex);
6016 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006017
6018 camera3_notify_msg_t notify_msg;
6019 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6020 notify_msg.type = CAMERA3_MSG_ERROR;
6021 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6022 notify_msg.message.error.error_stream = NULL;
6023 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006024 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006025
6026 return rc;
6027}
6028
6029/*===========================================================================
6030 * FUNCTION : captureResultCb
6031 *
6032 * DESCRIPTION: Callback handler for all capture result
6033 * (streams, as well as metadata)
6034 *
6035 * PARAMETERS :
6036 * @metadata : metadata information
6037 * @buffer : actual gralloc buffer to be returned to frameworks.
6038 * NULL if metadata.
6039 *
6040 * RETURN : NONE
6041 *==========================================================================*/
6042void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6043 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6044{
6045 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006046 pthread_mutex_lock(&mMutex);
6047 uint8_t batchSize = mBatchSize;
6048 pthread_mutex_unlock(&mMutex);
6049 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006050 handleBatchMetadata(metadata_buf,
6051 true /* free_and_bufdone_meta_buf */);
6052 } else { /* mBatchSize = 0 */
6053 hdrPlusPerfLock(metadata_buf);
6054 pthread_mutex_lock(&mMutex);
6055 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006056 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006057 true /* last urgent frame of batch metadata */,
6058 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006059 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006060 pthread_mutex_unlock(&mMutex);
6061 }
6062 } else if (isInputBuffer) {
6063 pthread_mutex_lock(&mMutex);
6064 handleInputBufferWithLock(frame_number);
6065 pthread_mutex_unlock(&mMutex);
6066 } else {
6067 pthread_mutex_lock(&mMutex);
6068 handleBufferWithLock(buffer, frame_number);
6069 pthread_mutex_unlock(&mMutex);
6070 }
6071 return;
6072}
6073
6074/*===========================================================================
6075 * FUNCTION : getReprocessibleOutputStreamId
6076 *
6077 * DESCRIPTION: Get source output stream id for the input reprocess stream
6078 * based on size and format, which would be the largest
6079 * output stream if an input stream exists.
6080 *
6081 * PARAMETERS :
6082 * @id : return the stream id if found
6083 *
6084 * RETURN : int32_t type of status
6085 * NO_ERROR -- success
6086 * none-zero failure code
6087 *==========================================================================*/
6088int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6089{
6090    /* check if there is any output or bidirectional stream with the same size
6091       and format as the input stream, and return that stream */
6092 if ((mInputStreamInfo.dim.width > 0) &&
6093 (mInputStreamInfo.dim.height > 0)) {
6094 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6095 it != mStreamInfo.end(); it++) {
6096
6097 camera3_stream_t *stream = (*it)->stream;
6098 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6099 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6100 (stream->format == mInputStreamInfo.format)) {
6101 // Usage flag for an input stream and the source output stream
6102 // may be different.
6103 LOGD("Found reprocessible output stream! %p", *it);
6104 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6105 stream->usage, mInputStreamInfo.usage);
6106
6107 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6108 if (channel != NULL && channel->mStreams[0]) {
6109 id = channel->mStreams[0]->getMyServerID();
6110 return NO_ERROR;
6111 }
6112 }
6113 }
6114 } else {
6115 LOGD("No input stream, so no reprocessible output stream");
6116 }
6117 return NAME_NOT_FOUND;
6118}
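/* Usage sketch (illustrative only): a caller that needs the reprocess source
 * stream would typically do the following; further error handling is omitted.
 *
 *   uint32_t reprocStreamId = 0;
 *   if (getReprocessibleOutputStreamId(reprocStreamId) == NO_ERROR) {
 *       // reprocStreamId holds the server stream id of the matching output
 *       // stream and can be used to route the reprocess/input request.
 *   } else {
 *       // NAME_NOT_FOUND: either no input stream is configured, or no output
 *       // stream matches the input stream's size and format.
 *   }
 */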
6119
6120/*===========================================================================
6121 * FUNCTION : lookupFwkName
6122 *
6123 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6124 *              make sure the parameter is correctly propagated
6125 *
6126 * PARAMETERS :
6127 * @arr : map between the two enums
6128 * @len : len of the map
6129 * @hal_name : name of the hal_parm to map
6130 *
6131 * RETURN : int type of status
6132 * fwk_name -- success
6133 *              NAME_NOT_FOUND -- failure
6134 *==========================================================================*/
6135template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6136 size_t len, halType hal_name)
6137{
6138
6139 for (size_t i = 0; i < len; i++) {
6140 if (arr[i].hal_name == hal_name) {
6141 return arr[i].fwk_name;
6142 }
6143 }
6144
6145    /* Not being able to find a matching framework type is not necessarily
6146     * an error case. This happens when mm-camera supports more attributes
6147     * than the framework does */
6148 LOGH("Cannot find matching framework type");
6149 return NAME_NOT_FOUND;
6150}
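/* Usage sketch (illustrative only): the map types passed to lookupFwkName /
 * lookupHalName are simple {hal_name, fwk_name} pair tables. For example,
 * translating a HAL-side flash mode value (halFlashMode, assumed here) into the
 * framework enum using the FLASH_MODES_MAP table used elsewhere in this file:
 *
 *   int val = lookupFwkName(FLASH_MODES_MAP,
 *           METADATA_MAP_SIZE(FLASH_MODES_MAP), halFlashMode);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwkFlashMode = (uint8_t)val;
 *       // fwkFlashMode now holds the corresponding ANDROID_FLASH_MODE_* value
 *   }
 */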
6151
6152/*===========================================================================
6153 * FUNCTION : lookupHalName
6154 *
6155 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6156 *              make sure the parameter is correctly propagated
6157 *
6158 * PARAMETERS :
6159 * @arr : map between the two enums
6160 * @len : len of the map
6161 * @fwk_name : name of the framework parameter to map
6162 *
6163 * RETURN : int32_t type of status
6164 * hal_name -- success
6165 *              NAME_NOT_FOUND -- failure
6166 *==========================================================================*/
6167template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6168 size_t len, fwkType fwk_name)
6169{
6170 for (size_t i = 0; i < len; i++) {
6171 if (arr[i].fwk_name == fwk_name) {
6172 return arr[i].hal_name;
6173 }
6174 }
6175
6176 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6177 return NAME_NOT_FOUND;
6178}
6179
6180/*===========================================================================
6181 * FUNCTION : lookupProp
6182 *
6183 * DESCRIPTION: lookup a value by its name
6184 *
6185 * PARAMETERS :
6186 * @arr : map between the two enums
6187 * @len : size of the map
6188 * @name : name to be looked up
6189 *
6190 * RETURN : Value if found
6191 * CAM_CDS_MODE_MAX if not found
6192 *==========================================================================*/
6193template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6194 size_t len, const char *name)
6195{
6196 if (name) {
6197 for (size_t i = 0; i < len; i++) {
6198 if (!strcmp(arr[i].desc, name)) {
6199 return arr[i].val;
6200 }
6201 }
6202 }
6203 return CAM_CDS_MODE_MAX;
6204}
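/* Usage sketch (illustrative, with a hypothetical table and property key):
 * lookupProp maps a textual property value onto a CDS mode enum, returning
 * CAM_CDS_MODE_MAX when the string is not recognized.
 *
 *   static const struct { const char *desc; cam_cds_mode_type_t val; } CDS_MAP[] = {
 *       { "off", CAM_CDS_MODE_OFF },   // enum values assumed from cam_types.h
 *       { "on",  CAM_CDS_MODE_ON  },
 *   };
 *   char prop[PROPERTY_VALUE_MAX];
 *   property_get("persist.camera.CDS", prop, "off");   // hypothetical key
 *   cam_cds_mode_type_t mode =
 *           lookupProp(CDS_MAP, sizeof(CDS_MAP) / sizeof(CDS_MAP[0]), prop);
 *   if (CAM_CDS_MODE_MAX == mode) {
 *       // value not recognized; fall back to a default
 *   }
 */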
6205
6206/*===========================================================================
6207 *
6208 * DESCRIPTION:
6209 *
6210 * PARAMETERS :
6211 * @metadata : metadata information from callback
6212 * @timestamp: metadata buffer timestamp
6213 * @request_id: request id
6214 * @jpegMetadata: additional jpeg metadata
     * @pipeline_depth: request pipeline depth
     * @capture_intent: capture intent from the request settings
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006215 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006216 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6217 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006218 * @pprocDone: whether internal offline postprocessing is done
     * @fwk_cacMode: chromatic aberration correction (CAC) mode to report to the framework
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006219 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6220 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 *
6222 * RETURN : camera_metadata_t*
6223 * metadata in a format specified by fwk
6224 *==========================================================================*/
6225camera_metadata_t*
6226QCamera3HardwareInterface::translateFromHalMetadata(
6227 metadata_buffer_t *metadata,
6228 nsecs_t timestamp,
6229 int32_t request_id,
6230 const CameraMetadata& jpegMetadata,
6231 uint8_t pipeline_depth,
6232 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006233 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006234 /* DevCamDebug metadata translateFromHalMetadata argument */
6235 uint8_t DevCamDebug_meta_enable,
6236 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006238 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006239 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006240{
6241 CameraMetadata camMetadata;
6242 camera_metadata_t *resultMetadata;
6243
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006244 if (!lastMetadataInBatch) {
6245 /* In batch mode, use empty metadata if this is not the last in batch*/
6246 resultMetadata = allocate_camera_metadata(0, 0);
6247 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006248 }
6249
Thierry Strudel3d639192016-09-09 11:52:26 -07006250 if (jpegMetadata.entryCount())
6251 camMetadata.append(jpegMetadata);
6252
6253 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6254 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6255 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6256 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006257 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006258 if (mBatchSize == 0) {
6259 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6260 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6261 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006262
Samuel Ha68ba5172016-12-15 18:41:12 -08006263 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6264 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6265 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6266 // DevCamDebug metadata translateFromHalMetadata AF
6267 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6268 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6269 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6270 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6271 }
6272 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6273 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6274 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6275 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6276 }
6277 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6278 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6279 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6280 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6281 }
6282 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6283 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6284 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6285 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6286 }
6287 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6288 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6289 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6290 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6291 }
6292 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6293 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6294 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6295 *DevCamDebug_af_monitor_pdaf_target_pos;
6296 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6297 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6298 }
6299 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6300 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6301 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6302 *DevCamDebug_af_monitor_pdaf_confidence;
6303 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6304 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6305 }
6306 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6307 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6308 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6309 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6310 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6311 }
6312 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6313 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6314 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6315 *DevCamDebug_af_monitor_tof_target_pos;
6316 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6317 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6318 }
6319 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6320 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6321 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6322 *DevCamDebug_af_monitor_tof_confidence;
6323 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6324 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6325 }
6326 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6327 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6328 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6329 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6330 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6331 }
6332 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6333 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6334 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6335 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6336 &fwk_DevCamDebug_af_monitor_type_select, 1);
6337 }
6338 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6339 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6340 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6341 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6342 &fwk_DevCamDebug_af_monitor_refocus, 1);
6343 }
6344 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6345 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6346 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6347 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6348 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6349 }
6350 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6351 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6352 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6353 *DevCamDebug_af_search_pdaf_target_pos;
6354 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6355 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6356 }
6357 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6358 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6359 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6360 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6361 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6362 }
6363 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6364 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6365 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6366 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6367 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6368 }
6369 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6370 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6371 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6372 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6373 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6374 }
6375 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6376 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6377 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6378 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6379 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6380 }
6381 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6382 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6383 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6384 *DevCamDebug_af_search_tof_target_pos;
6385 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6386 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6387 }
6388 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6389 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6390 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6391 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6392 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6393 }
6394 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6395 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6396 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6397 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6398 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6399 }
6400 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6401 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6402 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6403 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6404 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6405 }
6406 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6407 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6408 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6409 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6410 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6411 }
6412 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6413 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6414 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6415 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6416 &fwk_DevCamDebug_af_search_type_select, 1);
6417 }
6418 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6419 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6420 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6421 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6422 &fwk_DevCamDebug_af_search_next_pos, 1);
6423 }
6424 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6425 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6426 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6427 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6428 &fwk_DevCamDebug_af_search_target_pos, 1);
6429 }
6430 // DevCamDebug metadata translateFromHalMetadata AEC
6431 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6432 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6433 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6434 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6435 }
6436 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6437 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6438 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6439 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6440 }
6441 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6442 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6443 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6444 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6445 }
6446 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6447 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6448 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6449 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6450 }
6451 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6452 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6453 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6454 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6455 }
6456 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6457 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6458 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6459 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6460 }
6461 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6462 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6463 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6464 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6465 }
6466 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6467 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6468 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6469 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6470 }
Samuel Ha34229982017-02-17 13:51:11 -08006471 // DevCamDebug metadata translateFromHalMetadata zzHDR
6472 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6473 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6474 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6475 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6476 }
6477 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6478 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6479        int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6480 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6481 }
6482 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6483 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6484 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6485 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6486 }
6487 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6488 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6489        int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6490 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6491 }
6492 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6493 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6494 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6495 *DevCamDebug_aec_hdr_sensitivity_ratio;
6496 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6497 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6498 }
6499 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6500 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6501 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6502 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6503 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6504 }
6505 // DevCamDebug metadata translateFromHalMetadata ADRC
6506 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6507 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6508 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6509 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6510 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6511 }
6512 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6513 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6514 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6515 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6516 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6517 }
6518 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6519 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6520 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6521 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6522 }
6523 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6524 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6525 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6526 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6527 }
6528 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6529 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6530 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6531 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6532 }
6533 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6534 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6535 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6536 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6537 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006538 // DevCamDebug metadata translateFromHalMetadata AWB
6539 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6540 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6541 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6542 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6543 }
6544 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6545 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6546 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6547 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6548 }
6549 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6550 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6551 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6552 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6553 }
6554 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6555 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6556 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6557 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6558 }
6559 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6560 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6561 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6562 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6563 }
6564 }
6565 // atrace_end(ATRACE_TAG_ALWAYS);
6566
Thierry Strudel3d639192016-09-09 11:52:26 -07006567 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6568 int64_t fwk_frame_number = *frame_number;
6569 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6570 }
6571
6572 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6573 int32_t fps_range[2];
6574 fps_range[0] = (int32_t)float_range->min_fps;
6575 fps_range[1] = (int32_t)float_range->max_fps;
6576 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6577 fps_range, 2);
6578 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6579 fps_range[0], fps_range[1]);
6580 }
6581
6582 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6583 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6584 }
6585
6586 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6587        int val = lookupFwkName(SCENE_MODES_MAP,
6588 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6589 *sceneMode);
6590 if (NAME_NOT_FOUND != val) {
6591 uint8_t fwkSceneMode = (uint8_t)val;
6592 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6593 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6594 fwkSceneMode);
6595 }
6596 }
6597
6598 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6599 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6600 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6601 }
6602
6603 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6604 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6605 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6606 }
6607
6608 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6609 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6610 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6611 }
6612
6613 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6614 CAM_INTF_META_EDGE_MODE, metadata) {
6615 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6616 }
6617
6618 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6619 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6620 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6621 }
6622
6623 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6624 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6625 }
6626
6627 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6628 if (0 <= *flashState) {
6629 uint8_t fwk_flashState = (uint8_t) *flashState;
6630 if (!gCamCapability[mCameraId]->flash_available) {
6631 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6632 }
6633 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6634 }
6635 }
6636
6637 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6638 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6639 if (NAME_NOT_FOUND != val) {
6640 uint8_t fwk_flashMode = (uint8_t)val;
6641 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6642 }
6643 }
6644
6645 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6646 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6647 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6648 }
6649
6650 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6651 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6652 }
6653
6654 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6655 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6656 }
6657
6658 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6659 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6660 }
6661
6662 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6663 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6664 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6665 }
6666
6667 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6668 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6669 LOGD("fwk_videoStab = %d", fwk_videoStab);
6670 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6671 } else {
6672        // Regardless of whether video stabilization is supported, CTS expects the EIS
6673        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6674 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6675 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006676 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006677 }
6678
6679 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6680 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6681 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6682 }
6683
6684 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6685 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6686 }
6687
Thierry Strudel3d639192016-09-09 11:52:26 -07006688 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6689 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006690 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006691
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006692 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6693 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006694
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006695        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006696 blackLevelAppliedPattern->cam_black_level[0],
6697 blackLevelAppliedPattern->cam_black_level[1],
6698 blackLevelAppliedPattern->cam_black_level[2],
6699 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006700 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6701 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006702
6703#ifndef USE_HAL_3_3
6704 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006705        // Need to convert from the internal 12-bit depth to the sensor's
6706        // 10-bit RAW depth space.
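        // For example, an applied black level of 256 in the 12-bit pipeline
        // corresponds to 256 / 4 = 64 in the 10-bit RAW space reported here.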
6707 fwk_blackLevelInd[0] /= 4.0;
6708 fwk_blackLevelInd[1] /= 4.0;
6709 fwk_blackLevelInd[2] /= 4.0;
6710 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006711 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6712 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006713#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006714 }
6715
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006716#ifndef USE_HAL_3_3
6717 // Fixed whitelevel is used by ISP/Sensor
6718 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6719 &gCamCapability[mCameraId]->white_level, 1);
6720#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006721
6722 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6723 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6724 int32_t scalerCropRegion[4];
6725 scalerCropRegion[0] = hScalerCropRegion->left;
6726 scalerCropRegion[1] = hScalerCropRegion->top;
6727 scalerCropRegion[2] = hScalerCropRegion->width;
6728 scalerCropRegion[3] = hScalerCropRegion->height;
6729
6730 // Adjust crop region from sensor output coordinate system to active
6731 // array coordinate system.
6732 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6733 scalerCropRegion[2], scalerCropRegion[3]);
6734
6735 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6736 }
6737
6738 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6739 LOGD("sensorExpTime = %lld", *sensorExpTime);
6740 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6741 }
6742
6743    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6744            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6745        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6746        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6747 }
6748
6749 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6750 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6751 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6752 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6753 sensorRollingShutterSkew, 1);
6754 }
6755
6756 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6757 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6758 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6759
6760 //calculate the noise profile based on sensitivity
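        // Per the Android metadata definition of ANDROID_SENSOR_NOISE_PROFILE
        // (assumption based on the framework documentation), each color channel
        // gets one (S, O) pair and pixel noise is modeled as N(x) = sqrt(S * x + O),
        // so the same pair is replicated for every channel below.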
6761 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6762 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6763 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6764 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6765 noise_profile[i] = noise_profile_S;
6766 noise_profile[i+1] = noise_profile_O;
6767 }
6768 LOGD("noise model entry (S, O) is (%f, %f)",
6769 noise_profile_S, noise_profile_O);
6770 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6771 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6772 }
6773
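    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in units where 100
    // means no additional digital boost; the value reported below is the ISP
    // sensitivity (defaulting to 100) scaled by the post-stats sensitivity factor
    // when that metadata is available.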
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006774#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006775 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006776 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006777 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006778 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006779 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6780 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6781 }
6782 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006783#endif
6784
Thierry Strudel3d639192016-09-09 11:52:26 -07006785 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6786 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6787 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6788 }
6789
6790 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6791 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6792 *faceDetectMode);
6793 if (NAME_NOT_FOUND != val) {
6794 uint8_t fwk_faceDetectMode = (uint8_t)val;
6795 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6796
6797 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6798 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6799 CAM_INTF_META_FACE_DETECTION, metadata) {
6800 uint8_t numFaces = MIN(
6801 faceDetectionInfo->num_faces_detected, MAX_ROI);
6802 int32_t faceIds[MAX_ROI];
6803 uint8_t faceScores[MAX_ROI];
6804 int32_t faceRectangles[MAX_ROI * 4];
6805 int32_t faceLandmarks[MAX_ROI * 6];
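                    // Each face contributes six landmark values in the order
                    // left eye x/y, right eye x/y, mouth x/y; TOTAL_LANDMARK_INDICES
                    // is expected to match this count.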
6806 size_t j = 0, k = 0;
6807
6808 for (size_t i = 0; i < numFaces; i++) {
6809 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6810 // Adjust crop region from sensor output coordinate system to active
6811 // array coordinate system.
6812 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6813 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6814 rect.width, rect.height);
6815
6816 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6817 faceRectangles+j, -1);
6818
6819 j+= 4;
6820 }
6821 if (numFaces <= 0) {
6822 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6823 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6824 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6825 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6826 }
6827
6828 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6829 numFaces);
6830 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6831 faceRectangles, numFaces * 4U);
6832 if (fwk_faceDetectMode ==
6833 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6834 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6835 CAM_INTF_META_FACE_LANDMARK, metadata) {
6836
6837 for (size_t i = 0; i < numFaces; i++) {
6838 // Map the co-ordinate sensor output coordinate system to active
6839 // array coordinate system.
6840 mCropRegionMapper.toActiveArray(
6841 landmarks->face_landmarks[i].left_eye_center.x,
6842 landmarks->face_landmarks[i].left_eye_center.y);
6843 mCropRegionMapper.toActiveArray(
6844 landmarks->face_landmarks[i].right_eye_center.x,
6845 landmarks->face_landmarks[i].right_eye_center.y);
6846 mCropRegionMapper.toActiveArray(
6847 landmarks->face_landmarks[i].mouth_center.x,
6848 landmarks->face_landmarks[i].mouth_center.y);
6849
6850 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006851 k+= TOTAL_LANDMARK_INDICES;
6852 }
6853 } else {
6854 for (size_t i = 0; i < numFaces; i++) {
6855 setInvalidLandmarks(faceLandmarks+k);
6856 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006857 }
6858 }
6859
6860 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6861 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6862 faceLandmarks, numFaces * 6U);
6863 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006864 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6865 CAM_INTF_META_FACE_BLINK, metadata) {
6866 uint8_t detected[MAX_ROI];
6867 uint8_t degree[MAX_ROI * 2];
6868 for (size_t i = 0; i < numFaces; i++) {
6869 detected[i] = blinks->blink[i].blink_detected;
6870 degree[2 * i] = blinks->blink[i].left_blink;
6871 degree[2 * i + 1] = blinks->blink[i].right_blink;
6872 }
6873 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6874 detected, numFaces);
6875 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6876 degree, numFaces * 2);
6877 }
6878 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6879 CAM_INTF_META_FACE_SMILE, metadata) {
6880 uint8_t degree[MAX_ROI];
6881 uint8_t confidence[MAX_ROI];
6882 for (size_t i = 0; i < numFaces; i++) {
6883 degree[i] = smiles->smile[i].smile_degree;
6884 confidence[i] = smiles->smile[i].smile_confidence;
6885 }
6886 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6887 degree, numFaces);
6888 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6889 confidence, numFaces);
6890 }
6891 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6892 CAM_INTF_META_FACE_GAZE, metadata) {
6893 int8_t angle[MAX_ROI];
6894 int32_t direction[MAX_ROI * 3];
6895 int8_t degree[MAX_ROI * 2];
6896 for (size_t i = 0; i < numFaces; i++) {
6897 angle[i] = gazes->gaze[i].gaze_angle;
6898 direction[3 * i] = gazes->gaze[i].updown_dir;
6899 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6900 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6901 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6902 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6903 }
6904 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6905 (uint8_t *)angle, numFaces);
6906 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6907 direction, numFaces * 3);
6908 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6909 (uint8_t *)degree, numFaces * 2);
6910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006911 }
6912 }
6913 }
6914 }
6915
6916 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6917 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08006918 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006919 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08006920 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006921
Shuzhen Wang14415f52016-11-16 18:26:18 -08006922 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
6923 histogramBins = *histBins;
6924 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
6925 }
6926
6927 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006928 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6929 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08006930 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006931
6932 switch (stats_data->type) {
6933 case CAM_HISTOGRAM_TYPE_BAYER:
6934 switch (stats_data->bayer_stats.data_type) {
6935 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006936 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
6937 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006938 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006939 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
6940 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006941 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006942 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
6943 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006944 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006945 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006946 case CAM_STATS_CHANNEL_R:
6947 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006948 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
6949 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006950 }
6951 break;
6952 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006953 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006954 break;
6955 }
6956
Shuzhen Wang14415f52016-11-16 18:26:18 -08006957 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006958 }
6959 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006960 }
6961
6962 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6963 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6964 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6965 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6966 }
6967
6968 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6969 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6970 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6971 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6972 }
6973
6974 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6975 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6976 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6977 CAM_MAX_SHADING_MAP_HEIGHT);
6978 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6979 CAM_MAX_SHADING_MAP_WIDTH);
6980 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6981 lensShadingMap->lens_shading, 4U * map_width * map_height);
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6985 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6986 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6987 }
6988
6989 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6990 //Populate CAM_INTF_META_TONEMAP_CURVES
6991 /* ch0 = G, ch 1 = B, ch 2 = R*/
6992 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6993 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6994 tonemap->tonemap_points_cnt,
6995 CAM_MAX_TONEMAP_CURVE_SIZE);
6996 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6997 }
6998
6999 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7000 &tonemap->curves[0].tonemap_points[0][0],
7001 tonemap->tonemap_points_cnt * 2);
7002
7003 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7004 &tonemap->curves[1].tonemap_points[0][0],
7005 tonemap->tonemap_points_cnt * 2);
7006
7007 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7008 &tonemap->curves[2].tonemap_points[0][0],
7009 tonemap->tonemap_points_cnt * 2);
7010 }
7011
7012 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7013 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7014 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7015 CC_GAIN_MAX);
7016 }
7017
7018 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7019 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7020 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7021 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7022 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7023 }
7024
7025 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7026 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7027 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7028 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7029 toneCurve->tonemap_points_cnt,
7030 CAM_MAX_TONEMAP_CURVE_SIZE);
7031 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7032 }
7033 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7034 (float*)toneCurve->curve.tonemap_points,
7035 toneCurve->tonemap_points_cnt * 2);
7036 }
7037
7038 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7039 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7040 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7041 predColorCorrectionGains->gains, 4);
7042 }
7043
7044 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7045 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7046 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7047 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7048 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7049 }
7050
7051 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7052 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7053 }
7054
7055 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7056 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7057 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7058 }
7059
7060 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7061 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7062 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7066 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7067 *effectMode);
7068 if (NAME_NOT_FOUND != val) {
7069 uint8_t fwk_effectMode = (uint8_t)val;
7070 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7071 }
7072 }
7073
7074 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7075 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7076 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7077 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7078 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7079 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7080 }
7081 int32_t fwk_testPatternData[4];
7082 fwk_testPatternData[0] = testPatternData->r;
7083 fwk_testPatternData[3] = testPatternData->b;
7084 switch (gCamCapability[mCameraId]->color_arrangement) {
7085 case CAM_FILTER_ARRANGEMENT_RGGB:
7086 case CAM_FILTER_ARRANGEMENT_GRBG:
7087 fwk_testPatternData[1] = testPatternData->gr;
7088 fwk_testPatternData[2] = testPatternData->gb;
7089 break;
7090 case CAM_FILTER_ARRANGEMENT_GBRG:
7091 case CAM_FILTER_ARRANGEMENT_BGGR:
7092 fwk_testPatternData[2] = testPatternData->gr;
7093 fwk_testPatternData[1] = testPatternData->gb;
7094 break;
7095 default:
7096 LOGE("color arrangement %d is not supported",
7097 gCamCapability[mCameraId]->color_arrangement);
7098 break;
7099 }
7100 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7101 }
7102
7103 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7104 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7105 }
7106
7107 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7108 String8 str((const char *)gps_methods);
7109 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7110 }
7111
7112 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7113 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7114 }
7115
7116 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7117 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7118 }
7119
7120 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7121 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7122 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7123 }
7124
7125 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7126 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7127 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7128 }
7129
7130 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7131 int32_t fwk_thumb_size[2];
7132 fwk_thumb_size[0] = thumb_size->width;
7133 fwk_thumb_size[1] = thumb_size->height;
7134 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7135 }
7136
7137 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7138 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7139 privateData,
7140 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7141 }
7142
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007143 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007144 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007145 meteringMode, 1);
7146 }
7147
Thierry Strudel54dc9782017-02-15 12:12:10 -08007148 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7149 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7150 LOGD("hdr_scene_data: %d %f\n",
7151 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7152 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7153 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7154 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7155 &isHdr, 1);
7156 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7157 &isHdrConfidence, 1);
7158 }
7159
7160
7161
Thierry Strudel3d639192016-09-09 11:52:26 -07007162 if (metadata->is_tuning_params_valid) {
7163 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7164 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7165 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7166
7167
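        // Blob layout: six uint32 header fields (tuning data version followed by
        // the sensor/VFE/CPP/CAC/mod3 section sizes), then the sensor, VFE, CPP
        // and CAC payloads copied back-to-back from the tuning data buffer.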
7168 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7169 sizeof(uint32_t));
7170 data += sizeof(uint32_t);
7171
7172 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7173 sizeof(uint32_t));
7174 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7175 data += sizeof(uint32_t);
7176
7177 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7178 sizeof(uint32_t));
7179 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7180 data += sizeof(uint32_t);
7181
7182 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7183 sizeof(uint32_t));
7184 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7185 data += sizeof(uint32_t);
7186
7187 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7188 sizeof(uint32_t));
7189 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7190 data += sizeof(uint32_t);
7191
7192 metadata->tuning_params.tuning_mod3_data_size = 0;
7193 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7194 sizeof(uint32_t));
7195 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7196 data += sizeof(uint32_t);
7197
7198 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7199 TUNING_SENSOR_DATA_MAX);
7200 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7201 count);
7202 data += count;
7203
7204 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7205 TUNING_VFE_DATA_MAX);
7206 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7207 count);
7208 data += count;
7209
7210 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7211 TUNING_CPP_DATA_MAX);
7212 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7213 count);
7214 data += count;
7215
7216 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7217 TUNING_CAC_DATA_MAX);
7218 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7219 count);
7220 data += count;
7221
7222 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7223 (int32_t *)(void *)tuning_meta_data_blob,
7224 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7225 }
7226
7227 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7228 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7229 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7230 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7231 NEUTRAL_COL_POINTS);
7232 }
7233
7234 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7235 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7236 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7237 }
7238
7239 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7240 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7241 // Adjust crop region from sensor output coordinate system to active
7242 // array coordinate system.
7243 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7244 hAeRegions->rect.width, hAeRegions->rect.height);
7245
7246 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7247 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7248 REGIONS_TUPLE_COUNT);
7249 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7250 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7251 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7252 hAeRegions->rect.height);
7253 }
7254
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007255 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7256 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7257 if (NAME_NOT_FOUND != val) {
7258 uint8_t fwkAfMode = (uint8_t)val;
7259 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7260 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7261 } else {
7262 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7263 val);
7264 }
7265 }
7266
Thierry Strudel3d639192016-09-09 11:52:26 -07007267 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7268 uint8_t fwk_afState = (uint8_t) *afState;
7269 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007270 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007271 }
7272
7273 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7274 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7275 }
7276
7277 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7278 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7279 }
7280
7281 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7282 uint8_t fwk_lensState = *lensState;
7283 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7284 }
7285
7286 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7287 /*af regions*/
7288 int32_t afRegions[REGIONS_TUPLE_COUNT];
7289 // Adjust crop region from sensor output coordinate system to active
7290 // array coordinate system.
7291 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7292 hAfRegions->rect.width, hAfRegions->rect.height);
7293
7294 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7295 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7296 REGIONS_TUPLE_COUNT);
7297 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7298 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7299 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7300 hAfRegions->rect.height);
7301 }
7302
7303 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007304 uint32_t ab_mode = *hal_ab_mode;
7305 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7306 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7307 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007309 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007310 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007311 if (NAME_NOT_FOUND != val) {
7312 uint8_t fwk_ab_mode = (uint8_t)val;
7313 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7314 }
7315 }
7316
7317 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7318 int val = lookupFwkName(SCENE_MODES_MAP,
7319 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7320 if (NAME_NOT_FOUND != val) {
7321 uint8_t fwkBestshotMode = (uint8_t)val;
7322 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7323 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7324 } else {
7325 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7326 }
7327 }
7328
7329 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7330 uint8_t fwk_mode = (uint8_t) *mode;
7331 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7332 }
7333
7334    /* Constant metadata values to be updated */
7335 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7336 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7337
7338 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7339 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7340
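    // Hot pixel map mode is reported as OFF above, so publish an empty
    // hot pixel coordinate list (count 0).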
7341 int32_t hotPixelMap[2];
7342 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7343
7344 // CDS
7345 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7346 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7347 }
7348
Thierry Strudel04e026f2016-10-10 11:27:36 -07007349 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7350 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007351 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007352 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7353 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7354 } else {
7355 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7356 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007357
7358 if(fwk_hdr != curr_hdr_state) {
7359 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7360 if(fwk_hdr)
7361 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7362 else
7363 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7364 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007365 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7366 }
7367
Thierry Strudel54dc9782017-02-15 12:12:10 -08007368 //binning correction
7369 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7370 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7371 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7372 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7373 }
7374
Thierry Strudel04e026f2016-10-10 11:27:36 -07007375 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007376 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007377 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7378 int8_t is_ir_on = 0;
7379
7380        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7381 if(is_ir_on != curr_ir_state) {
7382 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7383 if(is_ir_on)
7384 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7385 else
7386 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7387 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007388 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007389 }
7390
Thierry Strudel269c81a2016-10-12 12:13:59 -07007391 // AEC SPEED
7392 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7393 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7394 }
7395
7396 // AWB SPEED
7397 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7398 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7399 }
7400
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 // TNR
7402 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7403 uint8_t tnr_enable = tnr->denoise_enable;
7404 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007405 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7406 int8_t is_tnr_on = 0;
7407
7408        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7409 if(is_tnr_on != curr_tnr_state) {
7410 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7411 if(is_tnr_on)
7412 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7413 else
7414 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7415 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007416
7417 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7418 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7419 }
7420
7421 // Reprocess crop data
7422 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7423 uint8_t cnt = crop_data->num_of_streams;
7424 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7425 // mm-qcamera-daemon only posts crop_data for streams
7426            // not linked to pproc, so the absence of valid crop metadata
7427            // is not necessarily an error.
7428 LOGD("No valid crop metadata entries");
7429 } else {
7430 uint32_t reproc_stream_id;
7431 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7432 LOGD("No reprocessible stream found, ignore crop data");
7433 } else {
7434 int rc = NO_ERROR;
7435 Vector<int32_t> roi_map;
7436 int32_t *crop = new int32_t[cnt*4];
7437 if (NULL == crop) {
7438 rc = NO_MEMORY;
7439 }
7440 if (NO_ERROR == rc) {
7441 int32_t streams_found = 0;
7442 for (size_t i = 0; i < cnt; i++) {
7443 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7444 if (pprocDone) {
7445 // HAL already does internal reprocessing,
7446 // either via reprocessing before JPEG encoding,
7447 // or offline postprocessing for pproc bypass case.
7448 crop[0] = 0;
7449 crop[1] = 0;
7450 crop[2] = mInputStreamInfo.dim.width;
7451 crop[3] = mInputStreamInfo.dim.height;
7452 } else {
7453 crop[0] = crop_data->crop_info[i].crop.left;
7454 crop[1] = crop_data->crop_info[i].crop.top;
7455 crop[2] = crop_data->crop_info[i].crop.width;
7456 crop[3] = crop_data->crop_info[i].crop.height;
7457 }
7458 roi_map.add(crop_data->crop_info[i].roi_map.left);
7459 roi_map.add(crop_data->crop_info[i].roi_map.top);
7460 roi_map.add(crop_data->crop_info[i].roi_map.width);
7461 roi_map.add(crop_data->crop_info[i].roi_map.height);
7462 streams_found++;
7463 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7464 crop[0], crop[1], crop[2], crop[3]);
7465 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7466 crop_data->crop_info[i].roi_map.left,
7467 crop_data->crop_info[i].roi_map.top,
7468 crop_data->crop_info[i].roi_map.width,
7469 crop_data->crop_info[i].roi_map.height);
7470 break;
7471
7472 }
7473 }
7474 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7475 &streams_found, 1);
7476 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7477 crop, (size_t)(streams_found * 4));
7478 if (roi_map.array()) {
7479 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7480 roi_map.array(), roi_map.size());
7481 }
7482 }
7483 if (crop) {
7484 delete [] crop;
7485 }
7486 }
7487 }
7488 }
7489
7490 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7491        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7492        // so hardcode the CAC result to OFF mode.
7493 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7494 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7495 } else {
7496 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7497 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7498 *cacMode);
7499 if (NAME_NOT_FOUND != val) {
7500 uint8_t resultCacMode = (uint8_t)val;
7501 // check whether CAC result from CB is equal to Framework set CAC mode
7502                // If they differ, report the CAC mode that came in the corresponding request
7503 if (fwk_cacMode != resultCacMode) {
7504 resultCacMode = fwk_cacMode;
7505 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007506 //Check if CAC is disabled by property
7507 if (m_cacModeDisabled) {
7508 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7509 }
7510
Thierry Strudel3d639192016-09-09 11:52:26 -07007511 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7512 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7513 } else {
7514 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7515 }
7516 }
7517 }
7518
7519 // Post blob of cam_cds_data through vendor tag.
7520 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7521 uint8_t cnt = cdsInfo->num_of_streams;
7522 cam_cds_data_t cdsDataOverride;
7523 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7524 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7525 cdsDataOverride.num_of_streams = 1;
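        // Only the CDS entry belonging to the reprocessible output stream is
        // copied into the override below, so the blob always advertises a
        // single stream.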
7526 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7527 uint32_t reproc_stream_id;
7528 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7529 LOGD("No reprocessible stream found, ignore cds data");
7530 } else {
7531 for (size_t i = 0; i < cnt; i++) {
7532 if (cdsInfo->cds_info[i].stream_id ==
7533 reproc_stream_id) {
7534 cdsDataOverride.cds_info[0].cds_enable =
7535 cdsInfo->cds_info[i].cds_enable;
7536 break;
7537 }
7538 }
7539 }
7540 } else {
7541 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7542 }
7543 camMetadata.update(QCAMERA3_CDS_INFO,
7544 (uint8_t *)&cdsDataOverride,
7545 sizeof(cam_cds_data_t));
7546 }
7547
7548 // Ldaf calibration data
7549 if (!mLdafCalibExist) {
7550 IF_META_AVAILABLE(uint32_t, ldafCalib,
7551 CAM_INTF_META_LDAF_EXIF, metadata) {
7552 mLdafCalibExist = true;
7553 mLdafCalib[0] = ldafCalib[0];
7554 mLdafCalib[1] = ldafCalib[1];
7555 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7556 ldafCalib[0], ldafCalib[1]);
7557 }
7558 }
7559
Thierry Strudel54dc9782017-02-15 12:12:10 -08007560 // EXIF debug data through vendor tag
7561 /*
7562 * Mobicat Mask can assume 3 values:
7563 * 1 refers to Mobicat data,
7564 * 2 refers to Stats Debug and Exif Debug Data
7565 * 3 refers to Mobicat and Stats Debug Data
7566 * We want to make sure that we are sending Exif debug data
7567 * only when Mobicat Mask is 2.
7568 */
7569 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7570 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7571 (uint8_t *)(void *)mExifParams.debug_params,
7572 sizeof(mm_jpeg_debug_exif_params_t));
7573 }
7574
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007575 // Reprocess and DDM debug data through vendor tag
7576 cam_reprocess_info_t repro_info;
7577 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007578 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7579 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007580 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007581 }
7582 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7583 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007584 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007585 }
7586 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7587 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007588 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007589 }
7590 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7591 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007592 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007593 }
7594 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7595 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007596 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007597 }
7598 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007599 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007600 }
7601 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7602 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007603 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007604 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007605 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7606 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7607 }
7608 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7609 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7610 }
7611 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7612 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007613
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007614 // INSTANT AEC MODE
7615 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7616 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7617 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7618 }
7619
Shuzhen Wange763e802016-03-31 10:24:29 -07007620 // AF scene change
7621 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7622 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7623 }
7624
Thierry Strudel3d639192016-09-09 11:52:26 -07007625 resultMetadata = camMetadata.release();
7626 return resultMetadata;
7627}
7628
7629/*===========================================================================
7630 * FUNCTION : saveExifParams
7631 *
7632 * DESCRIPTION: Cache the EXIF debug parameters delivered in the metadata callback
7633 *
7634 * PARAMETERS :
7635 * @metadata : metadata information from callback
7636 *
7637 * RETURN : none
7638 *
7639 *==========================================================================*/
7640void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7641{
7642 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7643 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7644 if (mExifParams.debug_params) {
7645 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7646 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7647 }
7648 }
7649 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7650 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7651 if (mExifParams.debug_params) {
7652 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7653 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7654 }
7655 }
7656 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7657 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7658 if (mExifParams.debug_params) {
7659 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7660 mExifParams.debug_params->af_debug_params_valid = TRUE;
7661 }
7662 }
7663 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7664 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7665 if (mExifParams.debug_params) {
7666 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7667 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7668 }
7669 }
7670 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7671 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7672 if (mExifParams.debug_params) {
7673 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7674 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7675 }
7676 }
7677 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7678 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7679 if (mExifParams.debug_params) {
7680 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7681 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7682 }
7683 }
7684 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7685 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7686 if (mExifParams.debug_params) {
7687 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7688 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7689 }
7690 }
7691 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7692 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7693 if (mExifParams.debug_params) {
7694 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7695 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7696 }
7697 }
7698}
7699
7700/*===========================================================================
7701 * FUNCTION : get3AExifParams
7702 *
7703 * DESCRIPTION: Return the cached 3A EXIF parameters
7704 *
7705 * PARAMETERS : none
7706 *
7707 *
7708 * RETURN : mm_jpeg_exif_params_t
7709 *
7710 *==========================================================================*/
7711mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7712{
7713 return mExifParams;
7714}
7715
7716/*===========================================================================
7717 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7718 *
7719 * DESCRIPTION: Translate urgent (partial result) metadata from the camera backend
 *              into framework result metadata
7720 *
7721 * PARAMETERS :
7722 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007723 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7724 * urgent metadata in a batch. Always true for
7725 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007726 *
7727 * RETURN : camera_metadata_t*
7728 * metadata in a format specified by fwk
7729 *==========================================================================*/
7730camera_metadata_t*
7731QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007732 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007733{
7734 CameraMetadata camMetadata;
7735 camera_metadata_t *resultMetadata;
7736
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007737 if (!lastUrgentMetadataInBatch) {
7738 /* In batch mode, use empty metadata if this is not the last in batch
7739 */
7740 resultMetadata = allocate_camera_metadata(0, 0);
7741 return resultMetadata;
7742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007743
7744 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7745 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7746 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7747 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7748 }
7749
7750 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7751 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7752 &aecTrigger->trigger, 1);
7753 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7754 &aecTrigger->trigger_id, 1);
7755 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7756 aecTrigger->trigger);
7757 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7758 aecTrigger->trigger_id);
7759 }
7760
7761 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7762 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7763 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7764 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7765 }
7766
Thierry Strudel3d639192016-09-09 11:52:26 -07007767 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7768 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7769 &af_trigger->trigger, 1);
7770 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7771 af_trigger->trigger);
7772 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7773 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7774 af_trigger->trigger_id);
7775 }
7776
7777 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7778 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7779 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7780 if (NAME_NOT_FOUND != val) {
7781 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7782 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7783 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7784 } else {
7785 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7786 }
7787 }
7788
7789 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7790 uint32_t aeMode = CAM_AE_MODE_MAX;
7791 int32_t flashMode = CAM_FLASH_MODE_MAX;
7792 int32_t redeye = -1;
7793 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7794 aeMode = *pAeMode;
7795 }
7796 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7797 flashMode = *pFlashMode;
7798 }
7799 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7800 redeye = *pRedeye;
7801 }
7802
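    // Deduce ANDROID_CONTROL_AE_MODE with the following precedence: red-eye
    // reduction enabled -> ON_AUTO_FLASH_REDEYE; flash AUTO/ON -> the mapped
    // ON_*_FLASH mode; otherwise report AE ON or OFF directly.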
7803 if (1 == redeye) {
7804 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7805 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7806 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7807 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7808 flashMode);
7809 if (NAME_NOT_FOUND != val) {
7810 fwk_aeMode = (uint8_t)val;
7811 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7812 } else {
7813 LOGE("Unsupported flash mode %d", flashMode);
7814 }
7815 } else if (aeMode == CAM_AE_MODE_ON) {
7816 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7817 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7818 } else if (aeMode == CAM_AE_MODE_OFF) {
7819 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7820 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7821 } else {
7822 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7823 "flashMode:%d, aeMode:%u!!!",
7824 redeye, flashMode, aeMode);
7825 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007826 if (mInstantAEC) {
7827        // Increment the frame index count until a bound is reached for instant AEC.
7828 mInstantAecFrameIdxCount++;
7829 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7830 CAM_INTF_META_AEC_INFO, metadata) {
7831 LOGH("ae_params->settled = %d",ae_params->settled);
7832 // If AEC settled, or if number of frames reached bound value,
7833 // should reset instant AEC.
7834 if (ae_params->settled ||
7835 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7836 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7837 mInstantAEC = false;
7838 mResetInstantAEC = true;
7839 mInstantAecFrameIdxCount = 0;
7840 }
7841 }
7842 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007843 resultMetadata = camMetadata.release();
7844 return resultMetadata;
7845}
7846
7847/*===========================================================================
7848 * FUNCTION : dumpMetadataToFile
7849 *
7850 * DESCRIPTION: Dumps tuning metadata to file system
7851 *
7852 * PARAMETERS :
7853 * @meta : tuning metadata
7854 * @dumpFrameCount : current dump frame count
7855 * @enabled : Enable mask
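 * @type : type string embedded in the dump file name
 * @frameNumber : frame number associated with the dumped metadata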
7856 *
7857 *==========================================================================*/
7858void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7859 uint32_t &dumpFrameCount,
7860 bool enabled,
7861 const char *type,
7862 uint32_t frameNumber)
7863{
7864 //Some sanity checks
7865 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7866 LOGE("Tuning sensor data size bigger than expected %d: %d",
7867 meta.tuning_sensor_data_size,
7868 TUNING_SENSOR_DATA_MAX);
7869 return;
7870 }
7871
7872 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7873 LOGE("Tuning VFE data size bigger than expected %d: %d",
7874 meta.tuning_vfe_data_size,
7875 TUNING_VFE_DATA_MAX);
7876 return;
7877 }
7878
7879 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7880 LOGE("Tuning CPP data size bigger than expected %d: %d",
7881 meta.tuning_cpp_data_size,
7882 TUNING_CPP_DATA_MAX);
7883 return;
7884 }
7885
7886 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7887 LOGE("Tuning CAC data size bigger than expected %d: %d",
7888 meta.tuning_cac_data_size,
7889 TUNING_CAC_DATA_MAX);
7890 return;
7891 }
7892 //
7893
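    // The dump file mirrors the QCAMERA3_TUNING_META_DATA_BLOB layout: six
    // uint32 header fields (version plus the five section sizes) followed by
    // the sensor, VFE, CPP and CAC payloads.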
7894 if(enabled){
7895 char timeBuf[FILENAME_MAX];
7896 char buf[FILENAME_MAX];
7897 memset(buf, 0, sizeof(buf));
7898 memset(timeBuf, 0, sizeof(timeBuf));
7899 time_t current_time;
7900 struct tm * timeinfo;
7901 time (&current_time);
7902 timeinfo = localtime (&current_time);
7903 if (timeinfo != NULL) {
7904 strftime (timeBuf, sizeof(timeBuf),
7905 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7906 }
7907 String8 filePath(timeBuf);
7908 snprintf(buf,
7909 sizeof(buf),
7910 "%dm_%s_%d.bin",
7911 dumpFrameCount,
7912 type,
7913 frameNumber);
7914 filePath.append(buf);
7915 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7916 if (file_fd >= 0) {
7917 ssize_t written_len = 0;
7918 meta.tuning_data_version = TUNING_DATA_VERSION;
7919 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7920 written_len += write(file_fd, data, sizeof(uint32_t));
7921 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7922 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7923 written_len += write(file_fd, data, sizeof(uint32_t));
7924 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7925 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7926 written_len += write(file_fd, data, sizeof(uint32_t));
7927 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7928 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7929 written_len += write(file_fd, data, sizeof(uint32_t));
7930 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7931 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7932 written_len += write(file_fd, data, sizeof(uint32_t));
7933 meta.tuning_mod3_data_size = 0;
7934 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7935 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7936 written_len += write(file_fd, data, sizeof(uint32_t));
7937 size_t total_size = meta.tuning_sensor_data_size;
7938 data = (void *)((uint8_t *)&meta.data);
7939 written_len += write(file_fd, data, total_size);
7940 total_size = meta.tuning_vfe_data_size;
7941 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7942 written_len += write(file_fd, data, total_size);
7943 total_size = meta.tuning_cpp_data_size;
7944 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7945 written_len += write(file_fd, data, total_size);
7946 total_size = meta.tuning_cac_data_size;
7947 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7948 written_len += write(file_fd, data, total_size);
7949 close(file_fd);
7950 }else {
7951 LOGE("fail to open file for metadata dumping");
7952 }
7953 }
7954}
7955
7956/*===========================================================================
7957 * FUNCTION : cleanAndSortStreamInfo
7958 *
7959 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7960 *              and sort them such that any raw stream is at the end of the list.
7961 *              This is a workaround for a camera daemon constraint.
7962 *
7963 * PARAMETERS : None
7964 *
7965 *==========================================================================*/
7966void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7967{
7968 List<stream_info_t *> newStreamInfo;
7969
7970 /*clean up invalid streams*/
7971 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7972 it != mStreamInfo.end();) {
7973 if(((*it)->status) == INVALID){
7974 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7975 delete channel;
7976 free(*it);
7977 it = mStreamInfo.erase(it);
7978 } else {
7979 it++;
7980 }
7981 }
7982
7983 // Move preview/video/callback/snapshot streams into newList
7984 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7985 it != mStreamInfo.end();) {
7986 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7987 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7988 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7989 newStreamInfo.push_back(*it);
7990 it = mStreamInfo.erase(it);
7991 } else
7992 it++;
7993 }
7994 // Move raw streams into newList
7995 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7996 it != mStreamInfo.end();) {
7997 newStreamInfo.push_back(*it);
7998 it = mStreamInfo.erase(it);
7999 }
8000
8001 mStreamInfo = newStreamInfo;
8002}
8003
8004/*===========================================================================
8005 * FUNCTION : extractJpegMetadata
8006 *
8007 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8008 * JPEG metadata is cached in HAL, and return as part of capture
8009 *              JPEG metadata is cached in HAL, and returned as part of capture
8010 *
8011 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8012 * @request: capture request
8013 *
8014 *==========================================================================*/
8015void QCamera3HardwareInterface::extractJpegMetadata(
8016 CameraMetadata& jpegMetadata,
8017 const camera3_capture_request_t *request)
8018{
8019 CameraMetadata frame_settings;
8020 frame_settings = request->settings;
8021
8022 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8023 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8024 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8025 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8026
8027 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8028 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8029 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8030 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8031
8032 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8033 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8034 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8035 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8036
8037 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8038 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8039 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8040 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8041
8042 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8043 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8044 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8045 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8046
8047 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8048 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8049 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8050 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8051
8052 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8053 int32_t thumbnail_size[2];
8054 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8055 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8056 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8057 int32_t orientation =
8058 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8059            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8060               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8061 int32_t temp;
8062 temp = thumbnail_size[0];
8063 thumbnail_size[0] = thumbnail_size[1];
8064 thumbnail_size[1] = temp;
8065 }
8066 }
8067 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8068 thumbnail_size,
8069 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8070 }
8071
8072}
8073
8074/*===========================================================================
8075 * FUNCTION : convertToRegions
8076 *
8077 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8078 *
8079 * PARAMETERS :
8080 * @rect : cam_rect_t struct to convert
8081 * @region : int32_t destination array
8082 * @weight : if we are converting from cam_area_t, weight is valid
8083 * else weight = -1
8084 *
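 * EXAMPLE    : rect {left=100, top=50, width=200, height=150} with weight 1
 *              becomes region {100, 50, 300, 200, 1}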
8085 *==========================================================================*/
8086void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8087 int32_t *region, int weight)
8088{
8089 region[0] = rect.left;
8090 region[1] = rect.top;
8091 region[2] = rect.left + rect.width;
8092 region[3] = rect.top + rect.height;
8093 if (weight > -1) {
8094 region[4] = weight;
8095 }
8096}
8097
8098/*===========================================================================
8099 * FUNCTION : convertFromRegions
8100 *
8101 * DESCRIPTION: helper method to convert a 5-element int32_t metadata region
8102 *              ([x_min, y_min, x_max, y_max, weight]) into a cam_area_t
8103 *
8104 * PARAMETERS :
8105 *   @roi            : cam_area_t destination struct
8106 *   @frame_settings : capture request settings to read the region from
8107 *   @tag            : metadata tag containing the region
8108 *
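 * EXAMPLE    : a tag value of {120, 80, 520, 380, 1} yields an roi of
 *              {left=120, top=80, width=400, height=300} with weight 1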
8109 *==========================================================================*/
8110void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8111        const CameraMetadata &frame_settings, uint32_t tag)
8112{
8113    int32_t x_min = frame_settings.find(tag).data.i32[0];
8114 int32_t y_min = frame_settings.find(tag).data.i32[1];
8115 int32_t x_max = frame_settings.find(tag).data.i32[2];
8116 int32_t y_max = frame_settings.find(tag).data.i32[3];
8117 roi.weight = frame_settings.find(tag).data.i32[4];
8118 roi.rect.left = x_min;
8119 roi.rect.top = y_min;
8120 roi.rect.width = x_max - x_min;
8121 roi.rect.height = y_max - y_min;
8122}
8123
8124/*===========================================================================
8125 * FUNCTION : resetIfNeededROI
8126 *
8127 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8128 * crop region
8129 *
8130 * PARAMETERS :
8131 * @roi : cam_area_t struct to resize
8132 * @scalerCropRegion : cam_crop_region_t region to compare against
8133 *
8134 *
8135 *==========================================================================*/
8136bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8137 const cam_crop_region_t* scalerCropRegion)
8138{
8139 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8140 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8141 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8142 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8143
8144    /* According to the spec, weight = 0 indicates the roi needs to be disabled.
8145     * Without this check, the validation below (whether the roi lies inside the
8146     * scaler crop region) would fail, the roi would not be reset, and the
8147     * algorithm would keep using a stale roi window.
8148     */
8149 if (roi->weight == 0) {
8150 return true;
8151 }
8152
8153    if ((roi_x_max < scalerCropRegion->left) ||
8154        // right edge of roi window is left of scaler crop's left edge
8155        (roi_y_max < scalerCropRegion->top) ||
8156        // bottom edge of roi window is above scaler crop's top edge
8157        (roi->rect.left > crop_x_max) ||
8158        // left edge of roi window is right of scaler crop's right edge
8159        (roi->rect.top > crop_y_max)){
8160        // top edge of roi window is below scaler crop's bottom edge
8161 return false;
8162 }
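    // Otherwise the roi at least overlaps the crop region; clamp each edge so the
    // roi lies fully inside the scaler crop region.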
8163 if (roi->rect.left < scalerCropRegion->left) {
8164 roi->rect.left = scalerCropRegion->left;
8165 }
8166 if (roi->rect.top < scalerCropRegion->top) {
8167 roi->rect.top = scalerCropRegion->top;
8168 }
8169 if (roi_x_max > crop_x_max) {
8170 roi_x_max = crop_x_max;
8171 }
8172 if (roi_y_max > crop_y_max) {
8173 roi_y_max = crop_y_max;
8174 }
8175 roi->rect.width = roi_x_max - roi->rect.left;
8176 roi->rect.height = roi_y_max - roi->rect.top;
8177 return true;
8178}
8179
8180/*===========================================================================
8181 * FUNCTION : convertLandmarks
8182 *
8183 * DESCRIPTION: helper method to extract the landmarks from face detection info
8184 *
8185 * PARAMETERS :
8186 * @landmark_data : input landmark data to be converted
8187 * @landmarks : int32_t destination array
8188 *
8189 *
8190 *==========================================================================*/
8191void QCamera3HardwareInterface::convertLandmarks(
8192 cam_face_landmarks_info_t landmark_data,
8193 int32_t *landmarks)
8194{
8195    if (landmark_data.is_left_eye_valid) {
8196 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8197 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8198 } else {
8199 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8200 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8201 }
8202
8203 if (landmark_data.is_right_eye_valid) {
8204 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8205 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8206 } else {
8207 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8208 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8209 }
8210
8211 if (landmark_data.is_mouth_valid) {
8212 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8213 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8214 } else {
8215 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8216 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8217 }
8218}
8219
8220/*===========================================================================
8221 * FUNCTION : setInvalidLandmarks
8222 *
8223 * DESCRIPTION: helper method to set invalid landmarks
8224 *
8225 * PARAMETERS :
8226 * @landmarks : int32_t destination array
8227 *
8228 *
8229 *==========================================================================*/
8230void QCamera3HardwareInterface::setInvalidLandmarks(
8231 int32_t *landmarks)
8232{
8233 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8234 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8235 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8236 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8237 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8238 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8239}
8240
8241#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8242
8243/*===========================================================================
8244 * FUNCTION : getCapabilities
8245 *
8246 * DESCRIPTION: query camera capability from back-end
8247 *
8248 * PARAMETERS :
8249 * @ops : mm-interface ops structure
8250 * @cam_handle : camera handle for which we need capability
8251 *
8252 * RETURN : ptr type of capability structure
8253 * capability for success
8254 * NULL for failure
8255 *==========================================================================*/
8256cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8257 uint32_t cam_handle)
8258{
8259 int rc = NO_ERROR;
8260 QCamera3HeapMemory *capabilityHeap = NULL;
8261 cam_capability_t *cap_ptr = NULL;
8262
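    // Flow: allocate and map a heap buffer for the backend, query capabilities into
    // it, copy the result into a malloc'd cam_capability_t, then fall through the
    // labels below to unmap and release the heap buffer.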
8263 if (ops == NULL) {
8264 LOGE("Invalid arguments");
8265 return NULL;
8266 }
8267
8268 capabilityHeap = new QCamera3HeapMemory(1);
8269 if (capabilityHeap == NULL) {
8270 LOGE("creation of capabilityHeap failed");
8271 return NULL;
8272 }
8273
8274 /* Allocate memory for capability buffer */
8275 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8276 if(rc != OK) {
8277 LOGE("No memory for cappability");
8278        LOGE("No memory for capability");
8279 }
8280
8281 /* Map memory for capability buffer */
8282 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8283
8284 rc = ops->map_buf(cam_handle,
8285 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8286 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8287 if(rc < 0) {
8288 LOGE("failed to map capability buffer");
8289 rc = FAILED_TRANSACTION;
8290 goto map_failed;
8291 }
8292
8293 /* Query Capability */
8294 rc = ops->query_capability(cam_handle);
8295 if(rc < 0) {
8296 LOGE("failed to query capability");
8297 rc = FAILED_TRANSACTION;
8298 goto query_failed;
8299 }
8300
8301 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8302 if (cap_ptr == NULL) {
8303 LOGE("out of memory");
8304 rc = NO_MEMORY;
8305 goto query_failed;
8306 }
8307
8308 memset(cap_ptr, 0, sizeof(cam_capability_t));
8309 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8310
8311 int index;
8312 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8313 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8314 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8315 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8316 }
8317
8318query_failed:
8319 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8320map_failed:
8321 capabilityHeap->deallocate();
8322allocate_failed:
8323 delete capabilityHeap;
8324
8325 if (rc != NO_ERROR) {
8326 return NULL;
8327 } else {
8328 return cap_ptr;
8329 }
8330}
8331
8332/*===========================================================================
8333 * FUNCTION : initCapabilities
8334 *
8335 * DESCRIPTION: initialize camera capabilities in static data struct
8336 *
8337 * PARAMETERS :
8338 * @cameraId : camera Id
8339 *
8340 * RETURN : int32_t type of status
8341 * NO_ERROR -- success
8342 * none-zero failure code
8343 *              non-zero failure code
8344int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8345{
8346 int rc = 0;
8347 mm_camera_vtbl_t *cameraHandle = NULL;
8348    uint32_t handle = 0;
8349
8350 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8351 if (rc) {
8352 LOGE("camera_open failed. rc = %d", rc);
8353 goto open_failed;
8354 }
8355 if (!cameraHandle) {
8356 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8357 goto open_failed;
8358 }
8359
8360    handle = get_main_camera_handle(cameraHandle->camera_handle);
8361 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8362 if (gCamCapability[cameraId] == NULL) {
8363 rc = FAILED_TRANSACTION;
8364 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008365 }
8366
8367    gCamCapability[cameraId]->camera_index = cameraId;
8368    if (is_dual_camera_by_idx(cameraId)) {
8369 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8370 gCamCapability[cameraId]->aux_cam_cap =
8371 getCapabilities(cameraHandle->ops, handle);
8372 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8373 rc = FAILED_TRANSACTION;
8374 free(gCamCapability[cameraId]);
8375 goto failed_op;
8376 }
8377
8378 // Copy the main camera capability to main_cam_cap struct
8379 gCamCapability[cameraId]->main_cam_cap =
8380 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8381 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8382 LOGE("out of memory");
8383 rc = NO_MEMORY;
8384 goto failed_op;
8385 }
8386 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8387 sizeof(cam_capability_t));
8388    }
8389failed_op:
8390    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8391 cameraHandle = NULL;
8392open_failed:
8393 return rc;
8394}
8395
8396/*==========================================================================
8397 * FUNCTION   : get3AVersion
8398 *
8399 * DESCRIPTION: get the Q3A S/W version
8400 *
8401 * PARAMETERS :
8402 * @sw_version: Reference of Q3A structure which will hold version info upon
8403 * return
8404 *
8405 * RETURN : None
8406 *
8407 *==========================================================================*/
8408void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8409{
8410 if(gCamCapability[mCameraId])
8411 sw_version = gCamCapability[mCameraId]->q3a_version;
8412 else
8413 LOGE("Capability structure NULL!");
8414}
8415
8416
8417/*===========================================================================
8418 * FUNCTION : initParameters
8419 *
8420 * DESCRIPTION: initialize camera parameters
8421 *
8422 * PARAMETERS :
8423 *
8424 * RETURN : int32_t type of status
8425 * NO_ERROR -- success
8426 * none-zero failure code
8427 *              non-zero failure code
8428int QCamera3HardwareInterface::initParameters()
8429{
8430 int rc = 0;
8431
8432 //Allocate Set Param Buffer
8433 mParamHeap = new QCamera3HeapMemory(1);
8434 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8435 if(rc != OK) {
8436 rc = NO_MEMORY;
8437 LOGE("Failed to allocate SETPARM Heap memory");
8438 delete mParamHeap;
8439 mParamHeap = NULL;
8440 return rc;
8441 }
8442
8443 //Map memory for parameters buffer
8444 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8445 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8446 mParamHeap->getFd(0),
8447 sizeof(metadata_buffer_t),
8448 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8449 if(rc < 0) {
8450 LOGE("failed to map SETPARM buffer");
8451 rc = FAILED_TRANSACTION;
8452 mParamHeap->deallocate();
8453 delete mParamHeap;
8454 mParamHeap = NULL;
8455 return rc;
8456 }
8457
8458 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8459
8460 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8461 return rc;
8462}
8463
8464/*===========================================================================
8465 * FUNCTION : deinitParameters
8466 *
8467 * DESCRIPTION: de-initialize camera parameters
8468 *
8469 * PARAMETERS :
8470 *
8471 * RETURN : NONE
8472 *==========================================================================*/
8473void QCamera3HardwareInterface::deinitParameters()
8474{
8475 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8476 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8477
8478 mParamHeap->deallocate();
8479 delete mParamHeap;
8480 mParamHeap = NULL;
8481
8482 mParameters = NULL;
8483
8484 free(mPrevParameters);
8485 mPrevParameters = NULL;
8486}
8487
8488/*===========================================================================
8489 * FUNCTION : calcMaxJpegSize
8490 *
8491 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8492 *
8493 * PARAMETERS :
8494 *
8495 * RETURN : max_jpeg_size
8496 *==========================================================================*/
8497size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8498{
8499 size_t max_jpeg_size = 0;
8500 size_t temp_width, temp_height;
8501 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8502 MAX_SIZES_CNT);
8503 for (size_t i = 0; i < count; i++) {
8504 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8505 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8506 if (temp_width * temp_height > max_jpeg_size ) {
8507 max_jpeg_size = temp_width * temp_height;
8508 }
8509 }
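    // Worst-case JPEG buffer: 1.5 bytes per pixel of the largest picture size,
    // plus sizeof(camera3_jpeg_blob_t) for the JPEG blob header.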
8510 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8511 return max_jpeg_size;
8512}
8513
8514/*===========================================================================
8515 * FUNCTION : getMaxRawSize
8516 *
8517 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8518 *
8519 * PARAMETERS :
8520 *
8521 * RETURN : Largest supported Raw Dimension
8522 *==========================================================================*/
8523cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8524{
8525 int max_width = 0;
8526 cam_dimension_t maxRawSize;
8527
8528 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8529 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8530 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8531 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8532 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8533 }
8534 }
8535 return maxRawSize;
8536}
8537
8538
8539/*===========================================================================
8540 * FUNCTION : calcMaxJpegDim
8541 *
8542 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8543 *
8544 * PARAMETERS :
8545 *
8546 * RETURN : max_jpeg_dim
8547 *==========================================================================*/
8548cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8549{
8550 cam_dimension_t max_jpeg_dim;
8551 cam_dimension_t curr_jpeg_dim;
8552 max_jpeg_dim.width = 0;
8553 max_jpeg_dim.height = 0;
8554 curr_jpeg_dim.width = 0;
8555 curr_jpeg_dim.height = 0;
8556 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8557 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8558 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8559 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8560 max_jpeg_dim.width * max_jpeg_dim.height ) {
8561 max_jpeg_dim.width = curr_jpeg_dim.width;
8562 max_jpeg_dim.height = curr_jpeg_dim.height;
8563 }
8564 }
8565 return max_jpeg_dim;
8566}
8567
8568/*===========================================================================
8569 * FUNCTION : addStreamConfig
8570 *
8571 * DESCRIPTION: adds the stream configuration to the array
8572 *
8573 * PARAMETERS :
8574 * @available_stream_configs : pointer to stream configuration array
8575 * @scalar_format : scalar format
8576 * @dim : configuration dimension
8577 * @config_type : input or output configuration type
8578 *
8579 * RETURN : NONE
8580 *==========================================================================*/
8581void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8582 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8583{
8584 available_stream_configs.add(scalar_format);
8585 available_stream_configs.add(dim.width);
8586 available_stream_configs.add(dim.height);
8587 available_stream_configs.add(config_type);
8588}
8589
8590/*===========================================================================
8591 * FUNCTION   : supportBurstCapture
8592 *
8593 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8594 *
8595 * PARAMETERS :
8596 * @cameraId : camera Id
8597 *
8598 * RETURN : true if camera supports BURST_CAPTURE
8599 * false otherwise
8600 *==========================================================================*/
8601bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8602{
8603 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8604 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8605 const int32_t highResWidth = 3264;
8606 const int32_t highResHeight = 2448;
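    // 3264x2448 (~8 MP) is treated as the "high resolution" threshold when
    // searching the picture size table below.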
8607
8608 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8609 // Maximum resolution images cannot be captured at >= 10fps
8610 // -> not supporting BURST_CAPTURE
8611 return false;
8612 }
8613
8614 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8615 // Maximum resolution images can be captured at >= 20fps
8616 // --> supporting BURST_CAPTURE
8617 return true;
8618 }
8619
8620 // Find the smallest highRes resolution, or largest resolution if there is none
8621 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8622 MAX_SIZES_CNT);
8623 size_t highRes = 0;
8624 while ((highRes + 1 < totalCnt) &&
8625 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8626 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8627 highResWidth * highResHeight)) {
8628 highRes++;
8629 }
8630 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8631 return true;
8632 } else {
8633 return false;
8634 }
8635}
8636
8637/*===========================================================================
8638 * FUNCTION : initStaticMetadata
8639 *
8640 * DESCRIPTION: initialize the static metadata
8641 *
8642 * PARAMETERS :
8643 * @cameraId : camera Id
8644 *
8645 * RETURN : int32_t type of status
8646 * 0 -- success
8647 * non-zero failure code
8648 *==========================================================================*/
8649int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8650{
8651 int rc = 0;
8652 CameraMetadata staticInfo;
8653 size_t count = 0;
8654 bool limitedDevice = false;
8655 char prop[PROPERTY_VALUE_MAX];
8656 bool supportBurst = false;
8657
8658 supportBurst = supportBurstCapture(cameraId);
8659
8660 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8661 * guaranteed or if min fps of max resolution is less than 20 fps, its
8662     * guaranteed or if min fps of max resolution is less than 20 fps, it is
8663     * advertised as a limited device */
8664 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8665 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8666 !supportBurst;
8667
8668 uint8_t supportedHwLvl = limitedDevice ?
8669 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
8670#ifndef USE_HAL_3_3
8671 // LEVEL_3 - This device will support level 3.
8672 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8673#else
8674            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
8675#endif
8676
8677 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8678 &supportedHwLvl, 1);
8679
8680 bool facingBack = false;
8681 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8682 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8683 facingBack = true;
8684 }
8685 /*HAL 3 only*/
8686 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8687 &gCamCapability[cameraId]->min_focus_distance, 1);
8688
8689 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8690 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8691
8692 /*should be using focal lengths but sensor doesn't provide that info now*/
8693 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8694 &gCamCapability[cameraId]->focal_length,
8695 1);
8696
8697 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8698 gCamCapability[cameraId]->apertures,
8699 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8700
8701 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8702 gCamCapability[cameraId]->filter_densities,
8703 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8704
8705
8706    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8707 size_t mode_count =
8708 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8709 for (size_t i = 0; i < mode_count; i++) {
8710 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8711 }
8712    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8713            available_opt_stab_modes, mode_count);
8714
8715 int32_t lens_shading_map_size[] = {
8716 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8717 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8718 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8719 lens_shading_map_size,
8720 sizeof(lens_shading_map_size)/sizeof(int32_t));
8721
8722 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8723 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8724
8725 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8726 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8727
8728 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8729 &gCamCapability[cameraId]->max_frame_duration, 1);
8730
8731 camera_metadata_rational baseGainFactor = {
8732 gCamCapability[cameraId]->base_gain_factor.numerator,
8733 gCamCapability[cameraId]->base_gain_factor.denominator};
8734 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8735 &baseGainFactor, 1);
8736
8737 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8738 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8739
8740 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8741 gCamCapability[cameraId]->pixel_array_size.height};
8742 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8743 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8744
8745 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8746 gCamCapability[cameraId]->active_array_size.top,
8747 gCamCapability[cameraId]->active_array_size.width,
8748 gCamCapability[cameraId]->active_array_size.height};
8749 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8750 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8751
8752 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8753 &gCamCapability[cameraId]->white_level, 1);
8754
8755    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8756 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8757 gCamCapability[cameraId]->color_arrangement);
8758    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
8759            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
8760
8761#ifndef USE_HAL_3_3
8762 bool hasBlackRegions = false;
8763 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8764 LOGW("black_region_count: %d is bounded to %d",
8765 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8766 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8767 }
8768 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8769 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8770 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8771 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8772 }
8773 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8774 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8775 hasBlackRegions = true;
8776 }
8777#endif
8778    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8779 &gCamCapability[cameraId]->flash_charge_duration, 1);
8780
8781 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8782 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8783
8784    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8785 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8786 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
8787    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8788 &timestampSource, 1);
8789
8790    //update histogram vendor data
8791    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
8792            &gCamCapability[cameraId]->histogram_size, 1);
8793
8794    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
8795            &gCamCapability[cameraId]->max_histogram_count, 1);
8796
8797    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
8798    //so that the app can request fewer bins than the maximum supported.
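    //For instance, if max_histogram_count were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE
    //were 64, this would advertise {256, 128, 64}.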
8799 std::vector<int32_t> histBins;
8800 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
8801 histBins.push_back(maxHistBins);
8802 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
8803 (maxHistBins & 0x1) == 0) {
8804 histBins.push_back(maxHistBins >> 1);
8805 maxHistBins >>= 1;
8806 }
8807 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
8808 histBins.data(), histBins.size());
8809
8810    int32_t sharpness_map_size[] = {
8811 gCamCapability[cameraId]->sharpness_map_size.width,
8812 gCamCapability[cameraId]->sharpness_map_size.height};
8813
8814 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8815 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8816
8817 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8818 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8819
8820 int32_t scalar_formats[] = {
8821 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8822 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8823 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8824 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8825 HAL_PIXEL_FORMAT_RAW10,
8826 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8827 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8828 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8829 scalar_formats,
8830 scalar_formats_count);
8831
8832 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8833 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8834 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8835 count, MAX_SIZES_CNT, available_processed_sizes);
8836 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8837 available_processed_sizes, count * 2);
8838
8839 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8840 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8841 makeTable(gCamCapability[cameraId]->raw_dim,
8842 count, MAX_SIZES_CNT, available_raw_sizes);
8843 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8844 available_raw_sizes, count * 2);
8845
8846 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8847 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8848 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8849 count, MAX_SIZES_CNT, available_fps_ranges);
8850 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8851 available_fps_ranges, count * 2);
8852
8853 camera_metadata_rational exposureCompensationStep = {
8854 gCamCapability[cameraId]->exp_compensation_step.numerator,
8855 gCamCapability[cameraId]->exp_compensation_step.denominator};
8856 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8857 &exposureCompensationStep, 1);
8858
8859 Vector<uint8_t> availableVstabModes;
8860 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8861 char eis_prop[PROPERTY_VALUE_MAX];
8862    bool eisSupported = false;
8863    memset(eis_prop, 0, sizeof(eis_prop));
8864    property_get("persist.camera.eis.enable", eis_prop, "1");
8865    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8866    count = IS_TYPE_MAX;
8867 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8868 for (size_t i = 0; i < count; i++) {
8869 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8870 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8871 eisSupported = true;
8872 break;
8873 }
8874 }
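    // Advertise VIDEO_STABILIZATION_MODE_ON only when EIS is enabled via the
    // property above, the camera is back-facing, and the sensor supports
    // EIS 2.0 or EIS 3.0.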
8875 if (facingBack && eis_prop_set && eisSupported) {
8876        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8877 }
8878 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8879 availableVstabModes.array(), availableVstabModes.size());
8880
8881 /*HAL 1 and HAL 3 common*/
8882 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8883 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8884 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8885    float maxZoom = (float)maxZoomStep / (float)minZoomStep; //float division to avoid truncating fractional zoom ratios
8886 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8887 &maxZoom, 1);
8888
8889 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8890 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8891
8892 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8893 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8894 max3aRegions[2] = 0; /* AF not supported */
8895 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8896 max3aRegions, 3);
8897
8898 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8899 memset(prop, 0, sizeof(prop));
8900 property_get("persist.camera.facedetect", prop, "1");
8901 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8902 LOGD("Support face detection mode: %d",
8903 supportedFaceDetectMode);
8904
8905 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
8906    /* supported mode should be OFF if the max number of faces is 0 */
8907 if (maxFaces <= 0) {
8908 supportedFaceDetectMode = 0;
8909 }
8910    Vector<uint8_t> availableFaceDetectModes;
8911 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8912 if (supportedFaceDetectMode == 1) {
8913 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8914 } else if (supportedFaceDetectMode == 2) {
8915 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8916 } else if (supportedFaceDetectMode == 3) {
8917 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8918 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8919 } else {
8920 maxFaces = 0;
8921 }
8922 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8923 availableFaceDetectModes.array(),
8924 availableFaceDetectModes.size());
8925 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8926 (int32_t *)&maxFaces, 1);
8927    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8928 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8929 &face_bsgc, 1);
8930
8931#ifdef SUPPORT_DEPTH_DATA
8932    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8933 //TODO: Update depth size accordingly, currently we use active array
8934 // as reference.
8935 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8936 int32_t depthHeight =
8937 gCamCapability[cameraId]->active_array_size.height;
8938        // As per the spec, the depth point cloud sample count should be (width * height) / 16
8939 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
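        // e.g., a hypothetical 4032x3024 active array would advertise
        // 4032 * 3024 / 16 = 762048 depth samples.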
8940 assert(0 < depthSamplesCount);
8941 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
8942 &depthSamplesCount, 1);
8943
8944        int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8945 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8946 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8947 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
8948
8949        int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8950 1, 1 };
8951 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8952 depthMinDuration,
8953 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
8954
8955        int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB,
8956 depthSamplesCount, 1, 0 };
8957 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8958 depthStallDuration,
8959 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
8960
8961        uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8962 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8963 }
8964#endif
8965
8966    int32_t exposureCompensationRange[] = {
8967 gCamCapability[cameraId]->exposure_compensation_min,
8968 gCamCapability[cameraId]->exposure_compensation_max};
8969 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8970 exposureCompensationRange,
8971 sizeof(exposureCompensationRange)/sizeof(int32_t));
8972
8973 uint8_t lensFacing = (facingBack) ?
8974 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8975 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8976
8977 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8978 available_thumbnail_sizes,
8979 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8980
8981 /*all sizes will be clubbed into this tag*/
8982 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8983 /*android.scaler.availableStreamConfigurations*/
8984 Vector<int32_t> available_stream_configs;
8985 cam_dimension_t active_array_dim;
8986 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8987 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8988
8989 /*advertise list of input dimensions supported based on below property.
8990    By default all sizes up to 5MP will be advertised.
8991 Note that the setprop resolution format should be WxH.
8992 e.g: adb shell setprop persist.camera.input.minsize 1280x720
8993 To list all supported sizes, setprop needs to be set with "0x0" */
8994 cam_dimension_t minInputSize = {2592,1944}; //5MP
8995 memset(prop, 0, sizeof(prop));
8996 property_get("persist.camera.input.minsize", prop, "2592x1944");
8997 if (strlen(prop) > 0) {
8998 char *saveptr = NULL;
8999 char *token = strtok_r(prop, "x", &saveptr);
9000 if (token != NULL) {
9001 minInputSize.width = atoi(token);
9002 }
9003 token = strtok_r(NULL, "x", &saveptr);
9004 if (token != NULL) {
9005 minInputSize.height = atoi(token);
9006 }
9007 }
9008
9009    /* Add input/output stream configurations for each scalar format */
9010 for (size_t j = 0; j < scalar_formats_count; j++) {
9011 switch (scalar_formats[j]) {
9012 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9013 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9014 case HAL_PIXEL_FORMAT_RAW10:
9015 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9016 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9017 addStreamConfig(available_stream_configs, scalar_formats[j],
9018 gCamCapability[cameraId]->raw_dim[i],
9019 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9020 }
9021 break;
9022 case HAL_PIXEL_FORMAT_BLOB:
9023 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9024 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9025 addStreamConfig(available_stream_configs, scalar_formats[j],
9026 gCamCapability[cameraId]->picture_sizes_tbl[i],
9027 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9028 }
9029 break;
9030 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9031 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9032 default:
9033 cam_dimension_t largest_picture_size;
9034 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9035 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9036 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9037 addStreamConfig(available_stream_configs, scalar_formats[j],
9038 gCamCapability[cameraId]->picture_sizes_tbl[i],
9039 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9040                /* For the two formats below we also support input streams for reprocessing; advertise those */
9041 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9042 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9043 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9044 >= minInputSize.width) || (gCamCapability[cameraId]->
9045 picture_sizes_tbl[i].height >= minInputSize.height)) {
9046 addStreamConfig(available_stream_configs, scalar_formats[j],
9047 gCamCapability[cameraId]->picture_sizes_tbl[i],
9048 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9049 }
9050 }
9051            }
9052
9053            break;
9054 }
9055 }
9056
9057 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9058 available_stream_configs.array(), available_stream_configs.size());
9059 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9060 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9061
9062 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9063 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9064
9065 /* android.scaler.availableMinFrameDurations */
9066 Vector<int64_t> available_min_durations;
9067 for (size_t j = 0; j < scalar_formats_count; j++) {
9068 switch (scalar_formats[j]) {
9069 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9070 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9071 case HAL_PIXEL_FORMAT_RAW10:
9072 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9073 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9074 available_min_durations.add(scalar_formats[j]);
9075 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9076 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9077 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9078 }
9079 break;
9080 default:
9081 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9082 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9083 available_min_durations.add(scalar_formats[j]);
9084 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9085 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9086 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9087 }
9088 break;
9089 }
9090 }
9091 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9092 available_min_durations.array(), available_min_durations.size());
9093
9094 Vector<int32_t> available_hfr_configs;
9095 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9096 int32_t fps = 0;
9097 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9098 case CAM_HFR_MODE_60FPS:
9099 fps = 60;
9100 break;
9101 case CAM_HFR_MODE_90FPS:
9102 fps = 90;
9103 break;
9104 case CAM_HFR_MODE_120FPS:
9105 fps = 120;
9106 break;
9107 case CAM_HFR_MODE_150FPS:
9108 fps = 150;
9109 break;
9110 case CAM_HFR_MODE_180FPS:
9111 fps = 180;
9112 break;
9113 case CAM_HFR_MODE_210FPS:
9114 fps = 210;
9115 break;
9116 case CAM_HFR_MODE_240FPS:
9117 fps = 240;
9118 break;
9119 case CAM_HFR_MODE_480FPS:
9120 fps = 480;
9121 break;
9122 case CAM_HFR_MODE_OFF:
9123 case CAM_HFR_MODE_MAX:
9124 default:
9125 break;
9126 }
9127
9128 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9129 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9130 /* For each HFR frame rate, need to advertise one variable fps range
9131 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9132 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9133 * set by the app. When video recording is started, [120, 120] is
9134 * set. This way sensor configuration does not change when recording
9135 * is started */
9136
9137 /* (width, height, fps_min, fps_max, batch_size_max) */
9138 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9139 j < MAX_SIZES_CNT; j++) {
9140 available_hfr_configs.add(
9141 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9142 available_hfr_configs.add(
9143 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9144 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9145 available_hfr_configs.add(fps);
9146 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9147
9148 /* (width, height, fps_min, fps_max, batch_size_max) */
9149 available_hfr_configs.add(
9150 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9151 available_hfr_configs.add(
9152 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9153 available_hfr_configs.add(fps);
9154 available_hfr_configs.add(fps);
9155 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9156 }
9157 }
9158 }
9159 //Advertise HFR capability only if the property is set
9160 memset(prop, 0, sizeof(prop));
9161 property_get("persist.camera.hal3hfr.enable", prop, "1");
9162 uint8_t hfrEnable = (uint8_t)atoi(prop);
9163
9164 if(hfrEnable && available_hfr_configs.array()) {
9165 staticInfo.update(
9166 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9167 available_hfr_configs.array(), available_hfr_configs.size());
9168 }
9169
9170 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9171 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9172 &max_jpeg_size, 1);
9173
9174 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9175 size_t size = 0;
9176 count = CAM_EFFECT_MODE_MAX;
9177 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9178 for (size_t i = 0; i < count; i++) {
9179 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9180 gCamCapability[cameraId]->supported_effects[i]);
9181 if (NAME_NOT_FOUND != val) {
9182 avail_effects[size] = (uint8_t)val;
9183 size++;
9184 }
9185 }
9186 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9187 avail_effects,
9188 size);
9189
9190 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9191 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9192 size_t supported_scene_modes_cnt = 0;
9193 count = CAM_SCENE_MODE_MAX;
9194 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9195 for (size_t i = 0; i < count; i++) {
9196 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9197 CAM_SCENE_MODE_OFF) {
9198 int val = lookupFwkName(SCENE_MODES_MAP,
9199 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9200 gCamCapability[cameraId]->supported_scene_modes[i]);
9201
9202            if (NAME_NOT_FOUND != val) {
9203 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9204 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9205 supported_scene_modes_cnt++;
9206 }
9207 }
9208 }
9209 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9210 avail_scene_modes,
9211 supported_scene_modes_cnt);
9212
9213 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9214 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9215 supported_scene_modes_cnt,
9216 CAM_SCENE_MODE_MAX,
9217 scene_mode_overrides,
9218 supported_indexes,
9219 cameraId);
9220
9221 if (supported_scene_modes_cnt == 0) {
9222 supported_scene_modes_cnt = 1;
9223 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9224 }
9225
9226 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9227 scene_mode_overrides, supported_scene_modes_cnt * 3);
9228
9229 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9230 ANDROID_CONTROL_MODE_AUTO,
9231 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9232 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9233 available_control_modes,
9234 3);
9235
9236 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9237 size = 0;
9238 count = CAM_ANTIBANDING_MODE_MAX;
9239 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9240 for (size_t i = 0; i < count; i++) {
9241 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9242 gCamCapability[cameraId]->supported_antibandings[i]);
9243 if (NAME_NOT_FOUND != val) {
9244 avail_antibanding_modes[size] = (uint8_t)val;
9245 size++;
9246 }
9247
9248 }
9249 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9250 avail_antibanding_modes,
9251 size);
9252
9253 uint8_t avail_abberation_modes[] = {
9254 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9255 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9256 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9257 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9258 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9259 if (0 == count) {
9260        // If no aberration correction modes are available for a device, advertise only the OFF mode
9261        size = 1;
9262    } else {
9263        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9264        // So, advertise all 3 modes if at least one mode is supported, as per the
9265        // new M requirement
9266 size = 3;
9267 }
9268 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9269 avail_abberation_modes,
9270 size);
9271
9272 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9273 size = 0;
9274 count = CAM_FOCUS_MODE_MAX;
9275 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9276 for (size_t i = 0; i < count; i++) {
9277 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9278 gCamCapability[cameraId]->supported_focus_modes[i]);
9279 if (NAME_NOT_FOUND != val) {
9280 avail_af_modes[size] = (uint8_t)val;
9281 size++;
9282 }
9283 }
9284 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9285 avail_af_modes,
9286 size);
9287
9288 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9289 size = 0;
9290 count = CAM_WB_MODE_MAX;
9291 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9292 for (size_t i = 0; i < count; i++) {
9293 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9294 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9295 gCamCapability[cameraId]->supported_white_balances[i]);
9296 if (NAME_NOT_FOUND != val) {
9297 avail_awb_modes[size] = (uint8_t)val;
9298 size++;
9299 }
9300 }
9301 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9302 avail_awb_modes,
9303 size);
9304
9305 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9306 count = CAM_FLASH_FIRING_LEVEL_MAX;
9307 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9308 count);
9309 for (size_t i = 0; i < count; i++) {
9310 available_flash_levels[i] =
9311 gCamCapability[cameraId]->supported_firing_levels[i];
9312 }
9313 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9314 available_flash_levels, count);
9315
9316 uint8_t flashAvailable;
9317 if (gCamCapability[cameraId]->flash_available)
9318 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9319 else
9320 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9321 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9322 &flashAvailable, 1);
9323
9324 Vector<uint8_t> avail_ae_modes;
9325 count = CAM_AE_MODE_MAX;
9326 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9327 for (size_t i = 0; i < count; i++) {
9328 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9329 }
9330 if (flashAvailable) {
9331 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9332 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9333        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
9334    }
9335 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9336 avail_ae_modes.array(),
9337 avail_ae_modes.size());
9338
9339 int32_t sensitivity_range[2];
9340 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9341 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9342 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9343 sensitivity_range,
9344 sizeof(sensitivity_range) / sizeof(int32_t));
9345
9346 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9347 &gCamCapability[cameraId]->max_analog_sensitivity,
9348 1);
9349
9350 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9351 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9352 &sensor_orientation,
9353 1);
9354
9355 int32_t max_output_streams[] = {
9356 MAX_STALLING_STREAMS,
9357 MAX_PROCESSED_STREAMS,
9358 MAX_RAW_STREAMS};
9359 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9360 max_output_streams,
9361 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9362
9363 uint8_t avail_leds = 0;
9364 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9365 &avail_leds, 0);
9366
9367 uint8_t focus_dist_calibrated;
9368 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9369 gCamCapability[cameraId]->focus_dist_calibrated);
9370 if (NAME_NOT_FOUND != val) {
9371 focus_dist_calibrated = (uint8_t)val;
9372 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9373 &focus_dist_calibrated, 1);
9374 }
9375
9376 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9377 size = 0;
9378 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9379 MAX_TEST_PATTERN_CNT);
9380 for (size_t i = 0; i < count; i++) {
9381 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9382 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9383 if (NAME_NOT_FOUND != testpatternMode) {
9384 avail_testpattern_modes[size] = testpatternMode;
9385 size++;
9386 }
9387 }
9388 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9389 avail_testpattern_modes,
9390 size);
9391
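    // ANDROID_REQUEST_PIPELINE_MAX_DEPTH advertised here is the in-flight request budget
    // plus the empty-pipeline and frame-skip delays defined at the top of this file, i.e.
    // the worst-case number of frames a request can spend in the HAL pipeline.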
9392 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9393 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9394 &max_pipeline_depth,
9395 1);
9396
9397 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9398 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9399 &partial_result_count,
9400 1);
9401
9402 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9403 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9404
9405 Vector<uint8_t> available_capabilities;
9406 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9407 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9408 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9409 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9410 if (supportBurst) {
9411 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9412 }
9413 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9414 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9415 if (hfrEnable && available_hfr_configs.array()) {
9416 available_capabilities.add(
9417 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9418 }
9419
9420 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9421 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9422 }
9423 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9424 available_capabilities.array(),
9425 available_capabilities.size());
9426
9427     // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9428     // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9429 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9430 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9431
9432 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9433 &aeLockAvailable, 1);
9434
9435     // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9436     // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9437 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9438 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9439
9440 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9441 &awbLockAvailable, 1);
9442
9443 int32_t max_input_streams = 1;
9444 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9445 &max_input_streams,
9446 1);
9447
9448 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
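    // For example, the first entry below says an IMPLEMENTATION_DEFINED input stream can be
    // reprocessed into 2 output formats: BLOB (JPEG) and YCbCr_420_888.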
9449 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9450 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9451 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9452 HAL_PIXEL_FORMAT_YCbCr_420_888};
9453 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9454 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9455
9456 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9457 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9458 &max_latency,
9459 1);
9460
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009461#ifndef USE_HAL_3_3
9462 int32_t isp_sensitivity_range[2];
9463 isp_sensitivity_range[0] =
9464 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9465 isp_sensitivity_range[1] =
9466 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9467 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9468 isp_sensitivity_range,
9469 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9470#endif
9471
Thierry Strudel3d639192016-09-09 11:52:26 -07009472 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9473 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9474 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9475 available_hot_pixel_modes,
9476 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9477
9478 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9479 ANDROID_SHADING_MODE_FAST,
9480 ANDROID_SHADING_MODE_HIGH_QUALITY};
9481 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9482 available_shading_modes,
9483 3);
9484
9485 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9487 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9488 available_lens_shading_map_modes,
9489 2);
9490
9491 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9492 ANDROID_EDGE_MODE_FAST,
9493 ANDROID_EDGE_MODE_HIGH_QUALITY,
9494 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9495 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9496 available_edge_modes,
9497 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9498
9499 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9500 ANDROID_NOISE_REDUCTION_MODE_FAST,
9501 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9502 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9503 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9504 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9505 available_noise_red_modes,
9506 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9507
9508 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9509 ANDROID_TONEMAP_MODE_FAST,
9510 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9511 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9512 available_tonemap_modes,
9513 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9514
9515 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9516 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9517 available_hot_pixel_map_modes,
9518 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9519
9520 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9521 gCamCapability[cameraId]->reference_illuminant1);
9522 if (NAME_NOT_FOUND != val) {
9523 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9524 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9525 }
9526
9527 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9528 gCamCapability[cameraId]->reference_illuminant2);
9529 if (NAME_NOT_FOUND != val) {
9530 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9531 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9532 }
9533
9534 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9535 (void *)gCamCapability[cameraId]->forward_matrix1,
9536 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9537
9538 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9539 (void *)gCamCapability[cameraId]->forward_matrix2,
9540 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9541
9542 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9543 (void *)gCamCapability[cameraId]->color_transform1,
9544 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9545
9546 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9547 (void *)gCamCapability[cameraId]->color_transform2,
9548 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9549
9550 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9551 (void *)gCamCapability[cameraId]->calibration_transform1,
9552 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9553
9554 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9555 (void *)gCamCapability[cameraId]->calibration_transform2,
9556 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9557
9558 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9559 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9560 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9561 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9562 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9563 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9564 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9565 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9566 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9567 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9568 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9569 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9570 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9571 ANDROID_JPEG_GPS_COORDINATES,
9572 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9573 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9574 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9575 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9576 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9577 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9578 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9579 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9580 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9581 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009582#ifndef USE_HAL_3_3
9583 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9584#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009585 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009586 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009587 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9588 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009589 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009590 /* DevCamDebug metadata request_keys_basic */
9591 DEVCAMDEBUG_META_ENABLE,
9592 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009593 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9594 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009595 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009596
9597 size_t request_keys_cnt =
9598 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9599 Vector<int32_t> available_request_keys;
9600 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9601 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9602 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9603 }
9604
9605 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9606 available_request_keys.array(), available_request_keys.size());
9607
9608 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9609 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9610 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9611 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9612 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9613 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9614 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9615 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9616 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9617 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9618 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9619 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9620 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9621 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9622 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9623 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9624 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009625 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009626 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9627 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9628 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009629 ANDROID_STATISTICS_FACE_SCORES,
9630#ifndef USE_HAL_3_3
9631 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9632#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009633 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009634 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009635 // DevCamDebug metadata result_keys_basic
9636 DEVCAMDEBUG_META_ENABLE,
9637 // DevCamDebug metadata result_keys AF
9638 DEVCAMDEBUG_AF_LENS_POSITION,
9639 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9640 DEVCAMDEBUG_AF_TOF_DISTANCE,
9641 DEVCAMDEBUG_AF_LUMA,
9642 DEVCAMDEBUG_AF_HAF_STATE,
9643 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9644 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9645 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9646 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9647 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9648 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9649 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9650 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9651 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9652 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9653 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9654 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9655 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9656 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9657 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9658 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9659 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9660 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9661 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9662 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9663 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9664 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9665 // DevCamDebug metadata result_keys AEC
9666 DEVCAMDEBUG_AEC_TARGET_LUMA,
9667 DEVCAMDEBUG_AEC_COMP_LUMA,
9668 DEVCAMDEBUG_AEC_AVG_LUMA,
9669 DEVCAMDEBUG_AEC_CUR_LUMA,
9670 DEVCAMDEBUG_AEC_LINECOUNT,
9671 DEVCAMDEBUG_AEC_REAL_GAIN,
9672 DEVCAMDEBUG_AEC_EXP_INDEX,
9673 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009674 // DevCamDebug metadata result_keys zzHDR
9675 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9676 DEVCAMDEBUG_AEC_L_LINECOUNT,
9677 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9678 DEVCAMDEBUG_AEC_S_LINECOUNT,
9679 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9680 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9681 // DevCamDebug metadata result_keys ADRC
9682 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9683 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9684 DEVCAMDEBUG_AEC_GTM_RATIO,
9685 DEVCAMDEBUG_AEC_LTM_RATIO,
9686 DEVCAMDEBUG_AEC_LA_RATIO,
9687 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009688 // DevCamDebug metadata result_keys AWB
9689 DEVCAMDEBUG_AWB_R_GAIN,
9690 DEVCAMDEBUG_AWB_G_GAIN,
9691 DEVCAMDEBUG_AWB_B_GAIN,
9692 DEVCAMDEBUG_AWB_CCT,
9693 DEVCAMDEBUG_AWB_DECISION,
9694 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009695 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9696 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9697 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009698 };
9699
Thierry Strudel3d639192016-09-09 11:52:26 -07009700 size_t result_keys_cnt =
9701 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9702
9703 Vector<int32_t> available_result_keys;
9704 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9705 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9706 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9707 }
9708 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9709 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9710 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9711 }
9712 if (supportedFaceDetectMode == 1) {
9713 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9714 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9715 } else if ((supportedFaceDetectMode == 2) ||
9716 (supportedFaceDetectMode == 3)) {
9717 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9718 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9719 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009720#ifndef USE_HAL_3_3
9721 if (hasBlackRegions) {
9722 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9723 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9724 }
9725#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009726 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9727 available_result_keys.array(), available_result_keys.size());
9728
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009729 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009730 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9731 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9732 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9733 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9734 ANDROID_SCALER_CROPPING_TYPE,
9735 ANDROID_SYNC_MAX_LATENCY,
9736 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9737 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9738 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9739 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9740 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9741 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9742 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9743 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9744 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9745 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9746 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9747 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9748 ANDROID_LENS_FACING,
9749 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9750 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9751 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9752 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9753 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9754 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9755 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9756 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9757 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9758 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9759 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9760 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9761 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9762 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9763 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9764 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9765 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9766 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9767 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9768 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009769 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009770 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9771 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9772 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9773 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9774 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9775 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9776 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9777 ANDROID_CONTROL_AVAILABLE_MODES,
9778 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9779 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9780 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9781 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009782 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009783#ifdef SUPPORT_DEPTH_DATA
9784 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9785 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9786 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9787 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9788 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9789#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009790#ifndef USE_HAL_3_3
9791 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9792 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9793#endif
9794 };
9795
9796 Vector<int32_t> available_characteristics_keys;
9797 available_characteristics_keys.appendArray(characteristics_keys_basic,
9798 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9799#ifndef USE_HAL_3_3
9800 if (hasBlackRegions) {
9801 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9802 }
9803#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009804 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009805 available_characteristics_keys.array(),
9806 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009807
9808     /* Available stall durations depend on the HW + SW and will be different for different devices */
9809     /* Have to add for RAW after implementation */
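    /* Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry is a (format, width, height,
       stall duration in ns) tuple, which is why four values are appended per size below. */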
9810 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9811 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9812
9813 Vector<int64_t> available_stall_durations;
9814 for (uint32_t j = 0; j < stall_formats_count; j++) {
9815 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9816 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9817 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9818 available_stall_durations.add(stall_formats[j]);
9819 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9820 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9821 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9822 }
9823 } else {
9824 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9825 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9826 available_stall_durations.add(stall_formats[j]);
9827 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9828 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9829 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9830 }
9831 }
9832 }
9833 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9834 available_stall_durations.array(),
9835 available_stall_durations.size());
9836
9837 //QCAMERA3_OPAQUE_RAW
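    // The sensor's white level implies its RAW bit depth (8-, 10- or 12-bit), which selects
    // the matching QCOM- or MIPI-packed Bayer format for the opaque RAW stream below.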
9838 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9839 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9840 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9841 case LEGACY_RAW:
9842 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9843 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9844 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9845 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9846 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9847 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9848 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9849 break;
9850 case MIPI_RAW:
9851 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9852 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9853 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9854 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9855 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9856 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9857 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9858 break;
9859 default:
9860 LOGE("unknown opaque_raw_format %d",
9861 gCamCapability[cameraId]->opaque_raw_fmt);
9862 break;
9863 }
9864 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9865
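    // QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride) triplets, one per
    // supported RAW dimension, with the stride taken from the calculated plane layout.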
9866 Vector<int32_t> strides;
9867 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9868 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9869 cam_stream_buf_plane_info_t buf_planes;
9870 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9871 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9872 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9873 &gCamCapability[cameraId]->padding_info, &buf_planes);
9874 strides.add(buf_planes.plane_info.mp[0].stride);
9875 }
9876 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9877 strides.size());
9878
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009879 //TBD: remove the following line once backend advertises zzHDR in feature mask
9880 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009881 //Video HDR default
9882 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9883 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009884 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009885 int32_t vhdr_mode[] = {
9886 QCAMERA3_VIDEO_HDR_MODE_OFF,
9887 QCAMERA3_VIDEO_HDR_MODE_ON};
9888
9889 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9890 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9891 vhdr_mode, vhdr_mode_count);
9892 }
9893
Thierry Strudel3d639192016-09-09 11:52:26 -07009894 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9895 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9896 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9897
9898 uint8_t isMonoOnly =
9899 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9900 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9901 &isMonoOnly, 1);
9902
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009903#ifndef USE_HAL_3_3
9904 Vector<int32_t> opaque_size;
9905 for (size_t j = 0; j < scalar_formats_count; j++) {
9906 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9907 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9908 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9909 cam_stream_buf_plane_info_t buf_planes;
9910
9911 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9912 &gCamCapability[cameraId]->padding_info, &buf_planes);
9913
9914 if (rc == 0) {
9915 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9916 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9917 opaque_size.add(buf_planes.plane_info.frame_len);
9918                 } else {
9919 LOGE("raw frame calculation failed!");
9920 }
9921 }
9922 }
9923 }
9924
9925 if ((opaque_size.size() > 0) &&
9926 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9927 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9928 else
9929 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9930#endif
9931
Thierry Strudel04e026f2016-10-10 11:27:36 -07009932 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9933 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9934 size = 0;
9935 count = CAM_IR_MODE_MAX;
9936 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9937 for (size_t i = 0; i < count; i++) {
9938 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9939 gCamCapability[cameraId]->supported_ir_modes[i]);
9940 if (NAME_NOT_FOUND != val) {
9941 avail_ir_modes[size] = (int32_t)val;
9942 size++;
9943 }
9944 }
9945 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9946 avail_ir_modes, size);
9947 }
9948
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009949 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9950 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9951 size = 0;
9952 count = CAM_AEC_CONVERGENCE_MAX;
9953 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9954 for (size_t i = 0; i < count; i++) {
9955 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9956 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9957 if (NAME_NOT_FOUND != val) {
9958 available_instant_aec_modes[size] = (int32_t)val;
9959 size++;
9960 }
9961 }
9962 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9963 available_instant_aec_modes, size);
9964 }
9965
Thierry Strudel54dc9782017-02-15 12:12:10 -08009966 int32_t sharpness_range[] = {
9967 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9968 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9969 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9970
9971 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9972 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9973 size = 0;
9974 count = CAM_BINNING_CORRECTION_MODE_MAX;
9975 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9976 for (size_t i = 0; i < count; i++) {
9977 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9978 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9979 gCamCapability[cameraId]->supported_binning_modes[i]);
9980 if (NAME_NOT_FOUND != val) {
9981 avail_binning_modes[size] = (int32_t)val;
9982 size++;
9983 }
9984 }
9985 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9986 avail_binning_modes, size);
9987 }
9988
9989 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9990 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9991 size = 0;
9992 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9993 for (size_t i = 0; i < count; i++) {
9994 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9995 gCamCapability[cameraId]->supported_aec_modes[i]);
9996 if (NAME_NOT_FOUND != val)
9997 available_aec_modes[size++] = val;
9998 }
9999 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10000 available_aec_modes, size);
10001 }
10002
10003 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10004 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10005 size = 0;
10006 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10007 for (size_t i = 0; i < count; i++) {
10008 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10009 gCamCapability[cameraId]->supported_iso_modes[i]);
10010 if (NAME_NOT_FOUND != val)
10011 available_iso_modes[size++] = val;
10012 }
10013 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10014 available_iso_modes, size);
10015 }
10016
10017 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10018     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10019 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10020 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10021 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10022
10023 int32_t available_saturation_range[4];
10024 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10025 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10026 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10027 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10028 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10029 available_saturation_range, 4);
10030
10031 uint8_t is_hdr_values[2];
10032 is_hdr_values[0] = 0;
10033 is_hdr_values[1] = 1;
10034 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10035 is_hdr_values, 2);
10036
10037 float is_hdr_confidence_range[2];
10038 is_hdr_confidence_range[0] = 0.0;
10039 is_hdr_confidence_range[1] = 1.0;
10040 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10041 is_hdr_confidence_range, 2);
10042
Thierry Strudel3d639192016-09-09 11:52:26 -070010043 gStaticMetadata[cameraId] = staticInfo.release();
10044 return rc;
10045}
10046
10047/*===========================================================================
10048 * FUNCTION : makeTable
10049 *
10050 * DESCRIPTION: make a table of sizes
10051 *
10052 * PARAMETERS :
10053 *
10054 *
10055 *==========================================================================*/
10056void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10057 size_t max_size, int32_t *sizeTable)
10058{
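    // Flatten the dimension table into an int32 array of (width, height) pairs, using at
    // most max_size entries from the table.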
10059 size_t j = 0;
10060 if (size > max_size) {
10061 size = max_size;
10062 }
10063 for (size_t i = 0; i < size; i++) {
10064 sizeTable[j] = dimTable[i].width;
10065 sizeTable[j+1] = dimTable[i].height;
10066 j+=2;
10067 }
10068}
10069
10070/*===========================================================================
10071 * FUNCTION : makeFPSTable
10072 *
10073 * DESCRIPTION: make a table of fps ranges
10074 *
10075 * PARAMETERS :
10076 *
10077 *==========================================================================*/
10078void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10079 size_t max_size, int32_t *fpsRangesTable)
10080{
10081 size_t j = 0;
10082 if (size > max_size) {
10083 size = max_size;
10084 }
10085 for (size_t i = 0; i < size; i++) {
10086 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10087 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10088 j+=2;
10089 }
10090}
10091
10092/*===========================================================================
10093 * FUNCTION : makeOverridesList
10094 *
10095 * DESCRIPTION: make a list of scene mode overrides
10096 *
10097 * PARAMETERS :
10098 *
10099 *
10100 *==========================================================================*/
10101void QCamera3HardwareInterface::makeOverridesList(
10102 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10103 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10104{
10105     /* The daemon gives a list of overrides for all scene modes.
10106       However, we should send the framework only the overrides for the scene modes
10107       supported by the framework. */
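    // The overrides list is built as one (AE mode, AWB mode, AF mode) triplet per supported
    // scene mode, matching the layout of ANDROID_CONTROL_SCENE_MODE_OVERRIDES.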
10108 size_t j = 0;
10109 if (size > max_size) {
10110 size = max_size;
10111 }
10112 size_t focus_count = CAM_FOCUS_MODE_MAX;
10113 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10114 focus_count);
10115 for (size_t i = 0; i < size; i++) {
10116 bool supt = false;
10117 size_t index = supported_indexes[i];
10118 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10119 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10120 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10121 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10122 overridesTable[index].awb_mode);
10123 if (NAME_NOT_FOUND != val) {
10124 overridesList[j+1] = (uint8_t)val;
10125 }
10126 uint8_t focus_override = overridesTable[index].af_mode;
10127 for (size_t k = 0; k < focus_count; k++) {
10128 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10129 supt = true;
10130 break;
10131 }
10132 }
10133 if (supt) {
10134 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10135 focus_override);
10136 if (NAME_NOT_FOUND != val) {
10137 overridesList[j+2] = (uint8_t)val;
10138 }
10139 } else {
10140 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10141 }
10142 j+=3;
10143 }
10144}
10145
10146/*===========================================================================
10147 * FUNCTION : filterJpegSizes
10148 *
10149 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are at
10150 *              least the active array size divided by the maximum downscale factor
10151 *
10152 * PARAMETERS :
10153 *
10154 * RETURN : length of jpegSizes array
10155 *==========================================================================*/
10156
10157size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10158 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10159 uint8_t downscale_factor)
10160{
10161 if (0 == downscale_factor) {
10162 downscale_factor = 1;
10163 }
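    // Keep only processed sizes whose width and height are at least the active array size
    // divided by the downscale factor; smaller sizes are assumed not reachable by JPEG
    // downscaling within that factor.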
10164
10165 int32_t min_width = active_array_size.width / downscale_factor;
10166 int32_t min_height = active_array_size.height / downscale_factor;
10167 size_t jpegSizesCnt = 0;
10168 if (processedSizesCnt > maxCount) {
10169 processedSizesCnt = maxCount;
10170 }
10171 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10172 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10173 jpegSizes[jpegSizesCnt] = processedSizes[i];
10174 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10175 jpegSizesCnt += 2;
10176 }
10177 }
10178 return jpegSizesCnt;
10179}
10180
10181/*===========================================================================
10182 * FUNCTION : computeNoiseModelEntryS
10183 *
10184 * DESCRIPTION: function to map a given sensitivity to the S noise
10185 * model parameters in the DNG noise model.
10186 *
10187 * PARAMETERS : sens : the sensor sensitivity
10188 *
10189 * RETURN : S (sensor amplification) noise
10190 *
10191 *==========================================================================*/
10192double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
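    // The DNG/ANDROID_SENSOR_NOISE_PROFILE model approximates per-pixel noise as
    // N(x) = sqrt(S * x + O) for a normalized pixel value x; S is the signal-dependent
    // (shot noise) coefficient and grows linearly with sensitivity.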
10193 double s = gCamCapability[mCameraId]->gradient_S * sens +
10194 gCamCapability[mCameraId]->offset_S;
10195 return ((s < 0.0) ? 0.0 : s);
10196}
10197
10198/*===========================================================================
10199 * FUNCTION : computeNoiseModelEntryO
10200 *
10201 * DESCRIPTION: function to map a given sensitivity to the O noise
10202 * model parameters in the DNG noise model.
10203 *
10204 * PARAMETERS : sens : the sensor sensitivity
10205 *
10206 * RETURN : O (sensor readout) noise
10207 *
10208 *==========================================================================*/
10209double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
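    // O is the signal-independent (readout noise) coefficient of the same model. Beyond the
    // maximum analog sensitivity the remaining gain is applied digitally, so the readout
    // noise contribution is scaled by the square of that digital gain, as computed below.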
10210 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10211 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10212 1.0 : (1.0 * sens / max_analog_sens);
10213 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10214 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10215 return ((o < 0.0) ? 0.0 : o);
10216}
10217
10218/*===========================================================================
10219 * FUNCTION : getSensorSensitivity
10220 *
10221 * DESCRIPTION: convert iso_mode to an integer value
10222 *
10223 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10224 *
10225 * RETURN : sensitivity supported by sensor
10226 *
10227 *==========================================================================*/
10228int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10229{
10230 int32_t sensitivity;
10231
10232 switch (iso_mode) {
10233 case CAM_ISO_MODE_100:
10234 sensitivity = 100;
10235 break;
10236 case CAM_ISO_MODE_200:
10237 sensitivity = 200;
10238 break;
10239 case CAM_ISO_MODE_400:
10240 sensitivity = 400;
10241 break;
10242 case CAM_ISO_MODE_800:
10243 sensitivity = 800;
10244 break;
10245 case CAM_ISO_MODE_1600:
10246 sensitivity = 1600;
10247 break;
10248 default:
10249 sensitivity = -1;
10250 break;
10251 }
10252 return sensitivity;
10253}
10254
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010255int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10256 if (gHdrPlusClient != nullptr) {
10257 return OK;
10258 }
10259
10260 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10261 if (gHdrPlusClient->isEaselPresentOnDevice()) {
10262 // If Easel is present, power on Easel and suspend it immediately.
10263 status_t res = gHdrPlusClient->powerOnEasel();
10264 if (res != OK) {
10265 ALOGE("%s: Enabling Easel bypass failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10266 gHdrPlusClient = nullptr;
10267 return res;
10268 }
10269
10270 res = gHdrPlusClient->suspendEasel();
10271 if (res != OK) {
10272 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10273 }
10274
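        // Easel stays in bypass-only mode unless HDR+ is explicitly enabled via the
        // persist.camera.hdrplus.enable property.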
10275 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10276 } else {
10277 // Destroy HDR+ client if Easel isn't present.
10278 gHdrPlusClient = nullptr;
10279 }
10280
10281 return OK;
10282}
10283
Thierry Strudel3d639192016-09-09 11:52:26 -070010284/*===========================================================================
10285 * FUNCTION : getCamInfo
10286 *
10287 * DESCRIPTION: query camera capabilities
10288 *
10289 * PARAMETERS :
10290 * @cameraId : camera Id
10291 * @info : camera info struct to be filled in with camera capabilities
10292 *
10293 * RETURN : int type of status
10294 * NO_ERROR -- success
10295 * none-zero failure code
10296 *==========================================================================*/
10297int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10298 struct camera_info *info)
10299{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010300 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010301 int rc = 0;
10302
10303 pthread_mutex_lock(&gCamLock);
10304 if (NULL == gCamCapability[cameraId]) {
10305 rc = initCapabilities(cameraId);
10306 if (rc < 0) {
10307 pthread_mutex_unlock(&gCamLock);
10308 return rc;
10309 }
10310 }
10311
10312 if (NULL == gStaticMetadata[cameraId]) {
10313 rc = initStaticMetadata(cameraId);
10314 if (rc < 0) {
10315 pthread_mutex_unlock(&gCamLock);
10316 return rc;
10317 }
10318 }
10319
10320 switch(gCamCapability[cameraId]->position) {
10321 case CAM_POSITION_BACK:
10322 case CAM_POSITION_BACK_AUX:
10323 info->facing = CAMERA_FACING_BACK;
10324 break;
10325
10326 case CAM_POSITION_FRONT:
10327 case CAM_POSITION_FRONT_AUX:
10328 info->facing = CAMERA_FACING_FRONT;
10329 break;
10330
10331 default:
10332 LOGE("Unknown position type %d for camera id:%d",
10333 gCamCapability[cameraId]->position, cameraId);
10334 rc = -1;
10335 break;
10336 }
10337
10338
10339 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010340#ifndef USE_HAL_3_3
10341 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10342#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010343 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010344#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010345 info->static_camera_characteristics = gStaticMetadata[cameraId];
10346
10347 //For now assume both cameras can operate independently.
10348 info->conflicting_devices = NULL;
10349 info->conflicting_devices_length = 0;
10350
10351     // Resource cost is 100 * MIN(1.0, m/M),
10352     // where m is the throughput requirement with the maximum stream configuration
10353     // and M is the CPP maximum throughput.
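    // Illustrative example (hypothetical numbers): with 3 processed streams over a 12 MP
    // active array at 30 fps and a 1.2 GP/s CPP budget, m/M = (3 * 12e6 * 30) / 1.2e9 = 0.9,
    // so the reported resource cost would be 90.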
10354 float max_fps = 0.0;
10355 for (uint32_t i = 0;
10356 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10357 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10358 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10359 }
10360 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10361 gCamCapability[cameraId]->active_array_size.width *
10362 gCamCapability[cameraId]->active_array_size.height * max_fps /
10363 gCamCapability[cameraId]->max_pixel_bandwidth;
10364 info->resource_cost = 100 * MIN(1.0, ratio);
10365 LOGI("camera %d resource cost is %d", cameraId,
10366 info->resource_cost);
10367
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010368 rc = initHdrPlusClientLocked();
10369 if (rc != OK) {
10370 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10371 pthread_mutex_unlock(&gCamLock);
10372 return rc;
10373 }
10374
Thierry Strudel3d639192016-09-09 11:52:26 -070010375 pthread_mutex_unlock(&gCamLock);
10376 return rc;
10377}
10378
10379/*===========================================================================
10380 * FUNCTION : translateCapabilityToMetadata
10381 *
10382 * DESCRIPTION: translate the capability into camera_metadata_t
10383 *
10384 * PARAMETERS : type of the request
10385 *
10386 *
10387 * RETURN : success: camera_metadata_t*
10388 * failure: NULL
10389 *
10390 *==========================================================================*/
10391camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10392{
10393 if (mDefaultMetadata[type] != NULL) {
10394 return mDefaultMetadata[type];
10395 }
10396 //first time we are handling this request
10397 //fill up the metadata structure using the wrapper class
10398 CameraMetadata settings;
10399 //translate from cam_capability_t to camera_metadata_tag_t
10400 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10401 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10402 int32_t defaultRequestID = 0;
10403 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10404
10405 /* OIS disable */
10406 char ois_prop[PROPERTY_VALUE_MAX];
10407 memset(ois_prop, 0, sizeof(ois_prop));
10408 property_get("persist.camera.ois.disable", ois_prop, "0");
10409 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10410
10411 /* Force video to use OIS */
10412 char videoOisProp[PROPERTY_VALUE_MAX];
10413 memset(videoOisProp, 0, sizeof(videoOisProp));
10414 property_get("persist.camera.ois.video", videoOisProp, "1");
10415 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010416
10417 // Hybrid AE enable/disable
10418 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10419 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10420 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10421 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10422
Thierry Strudel3d639192016-09-09 11:52:26 -070010423 uint8_t controlIntent = 0;
10424 uint8_t focusMode;
10425 uint8_t vsMode;
10426 uint8_t optStabMode;
10427 uint8_t cacMode;
10428 uint8_t edge_mode;
10429 uint8_t noise_red_mode;
10430 uint8_t tonemap_mode;
10431 bool highQualityModeEntryAvailable = FALSE;
10432 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010433 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010434 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10435 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010436 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 switch (type) {
10439 case CAMERA3_TEMPLATE_PREVIEW:
10440 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10441 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10442 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10443 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10444 edge_mode = ANDROID_EDGE_MODE_FAST;
10445 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10446 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10447 break;
10448 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10449 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10450 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10451 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10452 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10453 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10454 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10455 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10456 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10457 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10458 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10459 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10460 highQualityModeEntryAvailable = TRUE;
10461 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10462 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10463 fastModeEntryAvailable = TRUE;
10464 }
10465 }
10466 if (highQualityModeEntryAvailable) {
10467 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10468 } else if (fastModeEntryAvailable) {
10469 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10470 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010471 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10472 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10473 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010474 break;
10475 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10476 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10477 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10478 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010479 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10480 edge_mode = ANDROID_EDGE_MODE_FAST;
10481 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10482 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10483 if (forceVideoOis)
10484 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10485 break;
10486 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10487 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10488 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10489 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010490 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10491 edge_mode = ANDROID_EDGE_MODE_FAST;
10492 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10493 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10494 if (forceVideoOis)
10495 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10496 break;
10497 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10498 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10499 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10500 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10501 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10502 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10503 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10504 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10505 break;
10506 case CAMERA3_TEMPLATE_MANUAL:
10507 edge_mode = ANDROID_EDGE_MODE_FAST;
10508 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10509 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10510 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10511 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10512 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10513 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10514 break;
10515 default:
10516 edge_mode = ANDROID_EDGE_MODE_FAST;
10517 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10518 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10519 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10520 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10521 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10522 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10523 break;
10524 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010525 // Set CAC to OFF if underlying device doesn't support
10526 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10527 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10528 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010529 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10530 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10531 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10532 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10533 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10534 }
10535 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010536 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010537
10538 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10539 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10540 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10541 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10542 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10543 || ois_disable)
10544 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10545 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010546 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010547
10548 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10549 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10550
10551 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10552 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10553
10554 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10555 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10556
10557 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10558 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10559
10560 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10561 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10562
10563 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10564 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10565
10566 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10567 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10568
10569 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10570 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10571
10572 /*flash*/
10573 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10574 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10575
10576 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10577 settings.update(ANDROID_FLASH_FIRING_POWER,
10578 &flashFiringLevel, 1);
10579
10580 /* lens */
10581 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10582 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10583
10584 if (gCamCapability[mCameraId]->filter_densities_count) {
10585 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10586        // A single default value is populated, so update with a count of 1
10587        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
10588 }
10589
10590 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10591 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10592
Thierry Strudel3d639192016-09-09 11:52:26 -070010593 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10594 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10595
10596 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10597 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10598
10599 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10600 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10601
10602 /* face detection (default to OFF) */
10603 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10604 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10605
Thierry Strudel54dc9782017-02-15 12:12:10 -080010606 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10607 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010608
10609 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10610 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10611
10612 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10613 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10614
Thierry Strudel3d639192016-09-09 11:52:26 -070010615
10616 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10617 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10618
10619 /* Exposure time(Update the Min Exposure Time)*/
10620 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10621 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10622
10623 /* frame duration */
10624 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10625 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10626
10627 /* sensitivity */
10628 static const int32_t default_sensitivity = 100;
10629 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010630#ifndef USE_HAL_3_3
10631 static const int32_t default_isp_sensitivity =
10632 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10633 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10634#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010635
10636 /*edge mode*/
10637 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10638
10639 /*noise reduction mode*/
10640 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10641
10642 /*color correction mode*/
10643 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10644 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10645
10646 /*transform matrix mode*/
10647 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10648
10649 int32_t scaler_crop_region[4];
10650 scaler_crop_region[0] = 0;
10651 scaler_crop_region[1] = 0;
10652 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10653 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10654 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10655
10656 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10657 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10658
10659 /*focus distance*/
10660 float focus_distance = 0.0;
10661 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10662
10663 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010664 /* Restrict template max_fps to 30 */
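    // e.g. with supported ranges {[15,30], [24,30], [30,30]}, preview/still/ZSL
    // templates pick the widest span ([15,30]) while the remaining templates pick
    // the highest fixed range ([30,30]); ranges above TEMPLATE_MAX_PREVIEW_FPS are skipped.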
Thierry Strudel3d639192016-09-09 11:52:26 -070010665 float max_range = 0.0;
10666 float max_fixed_fps = 0.0;
10667 int32_t fps_range[2] = {0, 0};
10668 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10669 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010670 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10671 TEMPLATE_MAX_PREVIEW_FPS) {
10672 continue;
10673 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010674 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10675 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10676 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10677 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10678 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10679 if (range > max_range) {
10680 fps_range[0] =
10681 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10682 fps_range[1] =
10683 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10684 max_range = range;
10685 }
10686 } else {
10687 if (range < 0.01 && max_fixed_fps <
10688 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10689 fps_range[0] =
10690 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10691 fps_range[1] =
10692 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10693 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10694 }
10695 }
10696 }
10697 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10698
10699 /*precapture trigger*/
10700 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10701 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10702
10703 /*af trigger*/
10704 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10705 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10706
10707 /* ae & af regions */
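    // Region layout is {xMin, yMin, xMax, yMax, weight}; a weight of 0 marks the
    // region as having no influence, so this default spans the full active array
    // without biasing metering or focus.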
10708 int32_t active_region[] = {
10709 gCamCapability[mCameraId]->active_array_size.left,
10710 gCamCapability[mCameraId]->active_array_size.top,
10711 gCamCapability[mCameraId]->active_array_size.left +
10712 gCamCapability[mCameraId]->active_array_size.width,
10713 gCamCapability[mCameraId]->active_array_size.top +
10714 gCamCapability[mCameraId]->active_array_size.height,
10715 0};
10716 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10717 sizeof(active_region) / sizeof(active_region[0]));
10718 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10719 sizeof(active_region) / sizeof(active_region[0]));
10720
10721 /* black level lock */
10722 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10723 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10724
Thierry Strudel3d639192016-09-09 11:52:26 -070010725 //special defaults for manual template
10726 if (type == CAMERA3_TEMPLATE_MANUAL) {
10727 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10728 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10729
10730 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10731 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10732
10733 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10734 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10735
10736 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10737 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10738
10739 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10740 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10741
10742 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10743 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10744 }
10745
10746
10747    /* TNR
10748     * This is where we decide which templates will have TNR enabled.
10749     * TNR is enabled if either the preview or the video stream requires it.
10750     * This is not to be confused with per-stream linking; that decision is still
10751     * made on a per-session basis and is handled as part of stream configuration.
10752     */
10753 uint8_t tnr_enable = 0;
10754
10755 if (m_bTnrPreview || m_bTnrVideo) {
10756
10757 switch (type) {
10758 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10759 tnr_enable = 1;
10760 break;
10761
10762 default:
10763 tnr_enable = 0;
10764 break;
10765 }
10766
10767 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10768 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10769 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10770
10771 LOGD("TNR:%d with process plate %d for template:%d",
10772 tnr_enable, tnr_process_type, type);
10773 }
10774
10775 //Update Link tags to default
10776 int32_t sync_type = CAM_TYPE_STANDALONE;
10777 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10778
10779    int32_t is_main = 0; // value is irrelevant here; the app is expected to overwrite it
10780 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10781
10782 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10783
10784 /* CDS default */
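    // The persist.camera.CDS property selects the default CDS mode; any value that
    // does not map to a known mode falls back to CAM_CDS_MODE_AUTO below.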
10785 char prop[PROPERTY_VALUE_MAX];
10786 memset(prop, 0, sizeof(prop));
10787 property_get("persist.camera.CDS", prop, "Auto");
10788 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10789 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10790 if (CAM_CDS_MODE_MAX == cds_mode) {
10791 cds_mode = CAM_CDS_MODE_AUTO;
10792 }
10793
10794 /* Disabling CDS in templates which have TNR enabled*/
10795 if (tnr_enable)
10796 cds_mode = CAM_CDS_MODE_OFF;
10797
10798 int32_t mode = cds_mode;
10799 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010800
Thierry Strudel269c81a2016-10-12 12:13:59 -070010801 /* Manual Convergence AEC Speed is disabled by default*/
10802 float default_aec_speed = 0;
10803 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10804
10805 /* Manual Convergence AWB Speed is disabled by default*/
10806 float default_awb_speed = 0;
10807 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10808
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010809 // Set instant AEC to normal convergence by default
10810 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10811 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10812
Shuzhen Wang19463d72016-03-08 11:09:52 -080010813 /* hybrid ae */
10814 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10815
Thierry Strudel3d639192016-09-09 11:52:26 -070010816 mDefaultMetadata[type] = settings.release();
10817
10818 return mDefaultMetadata[type];
10819}
10820
10821/*===========================================================================
10822 * FUNCTION : setFrameParameters
10823 *
10824 * DESCRIPTION: set parameters per frame as requested in the metadata from
10825 * framework
10826 *
10827 * PARAMETERS :
10828 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010829 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010830 * @blob_request: Whether this request is a blob request or not
10831 *
10832 * RETURN : success: NO_ERROR
10833 * failure:
10834 *==========================================================================*/
10835int QCamera3HardwareInterface::setFrameParameters(
10836 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010837 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 int blob_request,
10839 uint32_t snapshotStreamId)
10840{
10841 /*translate from camera_metadata_t type to parm_type_t*/
10842 int rc = 0;
10843 int32_t hal_version = CAM_HAL_V3;
10844
10845 clear_metadata_buffer(mParameters);
10846 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10847 LOGE("Failed to set hal version in the parameters");
10848 return BAD_VALUE;
10849 }
10850
10851 /*we need to update the frame number in the parameters*/
10852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10853 request->frame_number)) {
10854 LOGE("Failed to set the frame number in the parameters");
10855 return BAD_VALUE;
10856 }
10857
10858 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010860 LOGE("Failed to set stream type mask in the parameters");
10861 return BAD_VALUE;
10862 }
10863
10864 if (mUpdateDebugLevel) {
10865 uint32_t dummyDebugLevel = 0;
10866        /* The value of dummyDebugLevel is irrelevant. Setting
10867         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property */
10868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10869 dummyDebugLevel)) {
10870 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10871 return BAD_VALUE;
10872 }
10873 mUpdateDebugLevel = false;
10874 }
10875
10876 if(request->settings != NULL){
10877 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10878 if (blob_request)
10879 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10880 }
10881
10882 return rc;
10883}
10884
10885/*===========================================================================
10886 * FUNCTION : setReprocParameters
10887 *
10888 * DESCRIPTION: Translate framework metadata into the HAL metadata structure and
10889 *              fill the provided reprocess parameter buffer.
10890 *
10891 * PARAMETERS :
10892 * @request : request that needs to be serviced
10893 *
10894 * RETURN : success: NO_ERROR
10895 * failure:
10896 *==========================================================================*/
10897int32_t QCamera3HardwareInterface::setReprocParameters(
10898 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10899 uint32_t snapshotStreamId)
10900{
10901 /*translate from camera_metadata_t type to parm_type_t*/
10902 int rc = 0;
10903
10904 if (NULL == request->settings){
10905 LOGE("Reprocess settings cannot be NULL");
10906 return BAD_VALUE;
10907 }
10908
10909 if (NULL == reprocParam) {
10910 LOGE("Invalid reprocessing metadata buffer");
10911 return BAD_VALUE;
10912 }
10913 clear_metadata_buffer(reprocParam);
10914
10915 /*we need to update the frame number in the parameters*/
10916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10917 request->frame_number)) {
10918 LOGE("Failed to set the frame number in the parameters");
10919 return BAD_VALUE;
10920 }
10921
10922 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10923 if (rc < 0) {
10924 LOGE("Failed to translate reproc request");
10925 return rc;
10926 }
10927
10928 CameraMetadata frame_settings;
10929 frame_settings = request->settings;
10930 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10931 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10932 int32_t *crop_count =
10933 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10934 int32_t *crop_data =
10935 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10936 int32_t *roi_map =
10937 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
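        // crop_data and roi_map are packed as {left, top, width, height} per stream;
        // only the first entry is consumed since reprocess operates on a single stream.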
10938 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10939 cam_crop_data_t crop_meta;
10940 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10941 crop_meta.num_of_streams = 1;
10942 crop_meta.crop_info[0].crop.left = crop_data[0];
10943 crop_meta.crop_info[0].crop.top = crop_data[1];
10944 crop_meta.crop_info[0].crop.width = crop_data[2];
10945 crop_meta.crop_info[0].crop.height = crop_data[3];
10946
10947 crop_meta.crop_info[0].roi_map.left =
10948 roi_map[0];
10949 crop_meta.crop_info[0].roi_map.top =
10950 roi_map[1];
10951 crop_meta.crop_info[0].roi_map.width =
10952 roi_map[2];
10953 crop_meta.crop_info[0].roi_map.height =
10954 roi_map[3];
10955
10956 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10957 rc = BAD_VALUE;
10958 }
10959 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10960 request->input_buffer->stream,
10961 crop_meta.crop_info[0].crop.left,
10962 crop_meta.crop_info[0].crop.top,
10963 crop_meta.crop_info[0].crop.width,
10964 crop_meta.crop_info[0].crop.height);
10965 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10966 request->input_buffer->stream,
10967 crop_meta.crop_info[0].roi_map.left,
10968 crop_meta.crop_info[0].roi_map.top,
10969 crop_meta.crop_info[0].roi_map.width,
10970 crop_meta.crop_info[0].roi_map.height);
10971 } else {
10972 LOGE("Invalid reprocess crop count %d!", *crop_count);
10973 }
10974 } else {
10975 LOGE("No crop data from matching output stream");
10976 }
10977
10978 /* These settings are not needed for regular requests so handle them specially for
10979 reprocess requests; information needed for EXIF tags */
10980 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10981 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10982 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10983 if (NAME_NOT_FOUND != val) {
10984 uint32_t flashMode = (uint32_t)val;
10985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10986 rc = BAD_VALUE;
10987 }
10988 } else {
10989 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10990 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10991 }
10992 } else {
10993 LOGH("No flash mode in reprocess settings");
10994 }
10995
10996 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10997 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10999 rc = BAD_VALUE;
11000 }
11001 } else {
11002 LOGH("No flash state in reprocess settings");
11003 }
11004
11005 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11006 uint8_t *reprocessFlags =
11007 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11008 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11009 *reprocessFlags)) {
11010 rc = BAD_VALUE;
11011 }
11012 }
11013
Thierry Strudel54dc9782017-02-15 12:12:10 -080011014 // Add exif debug data to internal metadata
11015 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11016 mm_jpeg_debug_exif_params_t *debug_params =
11017 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11018 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11019 // AE
11020 if (debug_params->ae_debug_params_valid == TRUE) {
11021 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11022 debug_params->ae_debug_params);
11023 }
11024 // AWB
11025 if (debug_params->awb_debug_params_valid == TRUE) {
11026 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11027 debug_params->awb_debug_params);
11028 }
11029 // AF
11030 if (debug_params->af_debug_params_valid == TRUE) {
11031 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11032 debug_params->af_debug_params);
11033 }
11034 // ASD
11035 if (debug_params->asd_debug_params_valid == TRUE) {
11036 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11037 debug_params->asd_debug_params);
11038 }
11039 // Stats
11040 if (debug_params->stats_debug_params_valid == TRUE) {
11041 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11042 debug_params->stats_debug_params);
11043 }
11044 // BE Stats
11045 if (debug_params->bestats_debug_params_valid == TRUE) {
11046 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11047 debug_params->bestats_debug_params);
11048 }
11049 // BHIST
11050 if (debug_params->bhist_debug_params_valid == TRUE) {
11051 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11052 debug_params->bhist_debug_params);
11053 }
11054 // 3A Tuning
11055 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11057 debug_params->q3a_tuning_debug_params);
11058 }
11059 }
11060
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011061 // Add metadata which reprocess needs
11062 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11063 cam_reprocess_info_t *repro_info =
11064 (cam_reprocess_info_t *)frame_settings.find
11065 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011066 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011067 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011068 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011069 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011070 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011071 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011072 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011073 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011074 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011075 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011076 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011077 repro_info->pipeline_flip);
11078 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11079 repro_info->af_roi);
11080 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11081 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011082        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11083           CAM_INTF_PARM_ROTATION metadata has already been added in
11084           translateToHalMetadata and the HAL needs to keep this new rotation
11085           metadata. Otherwise, the old rotation info saved in the vendor tag
11086           is used. */
11087 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11088 CAM_INTF_PARM_ROTATION, reprocParam) {
11089 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11090 } else {
11091 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011092 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011094 }
11095
11096    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11097       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11098       roi.width and roi.height are the final JPEG size.
11099       For now, the HAL only checks this for reprocess requests */
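    /* e.g. a hypothetical reprocess request could crop a 4000x3000 input to
       {0, 375, 4000, 2250} and supply a 1920x1080 QCAMERA3_JPEG_ENCODE_CROP_ROI to
       produce a downscaled 16:9 JPEG */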
11100 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11101 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11102 uint8_t *enable =
11103 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11104 if (*enable == TRUE) {
11105 int32_t *crop_data =
11106 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11107 cam_stream_crop_info_t crop_meta;
11108 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11109 crop_meta.stream_id = 0;
11110 crop_meta.crop.left = crop_data[0];
11111 crop_meta.crop.top = crop_data[1];
11112 crop_meta.crop.width = crop_data[2];
11113 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011114 // The JPEG crop roi should match cpp output size
11115 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11116 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11117 crop_meta.roi_map.left = 0;
11118 crop_meta.roi_map.top = 0;
11119 crop_meta.roi_map.width = cpp_crop->crop.width;
11120 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011121 }
11122 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11123 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011124 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011125 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011126 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11127 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011128 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011129 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11130
11131 // Add JPEG scale information
11132 cam_dimension_t scale_dim;
11133 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11134 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11135 int32_t *roi =
11136 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11137 scale_dim.width = roi[2];
11138 scale_dim.height = roi[3];
11139 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11140 scale_dim);
11141 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11142 scale_dim.width, scale_dim.height, mCameraId);
11143 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011144 }
11145 }
11146
11147 return rc;
11148}
11149
11150/*===========================================================================
11151 * FUNCTION : saveRequestSettings
11152 *
11153 * DESCRIPTION: Add any settings that might have changed to the request settings
11154 * and save the settings to be applied on the frame
11155 *
11156 * PARAMETERS :
11157 * @jpegMetadata : the extracted and/or modified jpeg metadata
11158 * @request : request with initial settings
11159 *
11160 * RETURN :
11161 * camera_metadata_t* : pointer to the saved request settings
11162 *==========================================================================*/
11163camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11164 const CameraMetadata &jpegMetadata,
11165 camera3_capture_request_t *request)
11166{
11167 camera_metadata_t *resultMetadata;
11168 CameraMetadata camMetadata;
11169 camMetadata = request->settings;
11170
11171 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11172 int32_t thumbnail_size[2];
11173 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11174 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11175 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11176 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11177 }
11178
11179 if (request->input_buffer != NULL) {
11180 uint8_t reprocessFlags = 1;
11181 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11182 (uint8_t*)&reprocessFlags,
11183 sizeof(reprocessFlags));
11184 }
11185
11186 resultMetadata = camMetadata.release();
11187 return resultMetadata;
11188}
11189
11190/*===========================================================================
11191 * FUNCTION : setHalFpsRange
11192 *
11193 * DESCRIPTION: set FPS range parameter
11194 *
11195 *
11196 * PARAMETERS :
11197 * @settings : Metadata from framework
11198 * @hal_metadata: Metadata buffer
11199 *
11200 *
11201 * RETURN : success: NO_ERROR
11202 * failure:
11203 *==========================================================================*/
11204int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11205 metadata_buffer_t *hal_metadata)
11206{
11207 int32_t rc = NO_ERROR;
11208 cam_fps_range_t fps_range;
11209 fps_range.min_fps = (float)
11210 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11211 fps_range.max_fps = (float)
11212 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11213 fps_range.video_min_fps = fps_range.min_fps;
11214 fps_range.video_max_fps = fps_range.max_fps;
11215
11216 LOGD("aeTargetFpsRange fps: [%f %f]",
11217 fps_range.min_fps, fps_range.max_fps);
11218 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11219 * follows:
11220 * ---------------------------------------------------------------|
11221 * Video stream is absent in configure_streams |
11222 * (Camcorder preview before the first video record |
11223 * ---------------------------------------------------------------|
11224 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11225 * | | | vid_min/max_fps|
11226 * ---------------------------------------------------------------|
11227 * NO | [ 30, 240] | 240 | [240, 240] |
11228 * |-------------|-------------|----------------|
11229 * | [240, 240] | 240 | [240, 240] |
11230 * ---------------------------------------------------------------|
11231 * Video stream is present in configure_streams |
11232 * ---------------------------------------------------------------|
11233 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11234 * | | | vid_min/max_fps|
11235 * ---------------------------------------------------------------|
11236 * NO | [ 30, 240] | 240 | [240, 240] |
11237 * (camcorder prev |-------------|-------------|----------------|
11238 * after video rec | [240, 240] | 240 | [240, 240] |
11239 * is stopped) | | | |
11240 * ---------------------------------------------------------------|
11241 * YES | [ 30, 240] | 240 | [240, 240] |
11242 * |-------------|-------------|----------------|
11243 * | [240, 240] | 240 | [240, 240] |
11244 * ---------------------------------------------------------------|
11245 * When Video stream is absent in configure_streams,
11246 * preview fps = sensor_fps / batchsize
11247 * Eg: for 240fps at batchSize 4, preview = 60fps
11248 * for 120fps at batchSize 4, preview = 30fps
11249 *
11250 * When video stream is present in configure_streams, preview fps is as per
11251 * the ratio of preview buffers to video buffers requested in process
11252 * capture request
11253 */
11254 mBatchSize = 0;
11255 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11256 fps_range.min_fps = fps_range.video_max_fps;
11257 fps_range.video_min_fps = fps_range.video_max_fps;
11258 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11259 fps_range.max_fps);
11260 if (NAME_NOT_FOUND != val) {
11261 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11263 return BAD_VALUE;
11264 }
11265
11266 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11267 /* If batchmode is currently in progress and the fps changes,
11268 * set the flag to restart the sensor */
11269 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11270 (mHFRVideoFps != fps_range.max_fps)) {
11271 mNeedSensorRestart = true;
11272 }
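                // Batch size follows the HFR rate, e.g. 240fps with a 30fps preview
                // rate yields a batch of 8 frames, clamped to MAX_HFR_BATCH_SIZE.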
11273 mHFRVideoFps = fps_range.max_fps;
11274 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11275 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11276 mBatchSize = MAX_HFR_BATCH_SIZE;
11277 }
11278 }
11279 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11280
11281 }
11282 } else {
11283 /* HFR mode is session param in backend/ISP. This should be reset when
11284 * in non-HFR mode */
11285 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11287 return BAD_VALUE;
11288 }
11289 }
11290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11291 return BAD_VALUE;
11292 }
11293 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11294 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11295 return rc;
11296}
11297
11298/*===========================================================================
11299 * FUNCTION : translateToHalMetadata
11300 *
11301 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11302 *
11303 *
11304 * PARAMETERS :
11305 * @request : request sent from framework
11306 *
11307 *
11308 * RETURN : success: NO_ERROR
11309 * failure:
11310 *==========================================================================*/
11311int QCamera3HardwareInterface::translateToHalMetadata
11312 (const camera3_capture_request_t *request,
11313 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011314 uint32_t snapshotStreamId) {
11315 if (request == nullptr || hal_metadata == nullptr) {
11316 return BAD_VALUE;
11317 }
11318
11319 int64_t minFrameDuration = getMinFrameDuration(request);
11320
11321 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11322 minFrameDuration);
11323}
11324
11325int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11326 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11327 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11328
Thierry Strudel3d639192016-09-09 11:52:26 -070011329 int rc = 0;
11330 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011331 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011332
11333 /* Do not change the order of the following list unless you know what you are
11334 * doing.
11335 * The order is laid out in such a way that parameters in the front of the table
11336 * may be used to override the parameters later in the table. Examples are:
11337 * 1. META_MODE should precede AEC/AWB/AF MODE
11338     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11339     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11340     * 4. Any mode should precede its corresponding settings
11341 */
11342 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11343 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11345 rc = BAD_VALUE;
11346 }
11347 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11348 if (rc != NO_ERROR) {
11349 LOGE("extractSceneMode failed");
11350 }
11351 }
11352
11353 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11354 uint8_t fwk_aeMode =
11355 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11356 uint8_t aeMode;
11357 int32_t redeye;
11358
11359 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11360 aeMode = CAM_AE_MODE_OFF;
11361 } else {
11362 aeMode = CAM_AE_MODE_ON;
11363 }
11364 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11365 redeye = 1;
11366 } else {
11367 redeye = 0;
11368 }
11369
11370 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11371 fwk_aeMode);
11372 if (NAME_NOT_FOUND != val) {
11373 int32_t flashMode = (int32_t)val;
11374 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11375 }
11376
11377 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11379 rc = BAD_VALUE;
11380 }
11381 }
11382
11383 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11384 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11385 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11386 fwk_whiteLevel);
11387 if (NAME_NOT_FOUND != val) {
11388 uint8_t whiteLevel = (uint8_t)val;
11389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11390 rc = BAD_VALUE;
11391 }
11392 }
11393 }
11394
11395 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11396 uint8_t fwk_cacMode =
11397 frame_settings.find(
11398 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11399 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11400 fwk_cacMode);
11401 if (NAME_NOT_FOUND != val) {
11402 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11403 bool entryAvailable = FALSE;
11404 // Check whether Frameworks set CAC mode is supported in device or not
11405 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11406 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11407 entryAvailable = TRUE;
11408 break;
11409 }
11410 }
11411 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11412            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11413 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11414 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11415 if (entryAvailable == FALSE) {
11416 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11417 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11418 } else {
11419 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11420                    // High is not supported, so fall back to FAST; the spec says the
11421                    // underlying device implementation can be the same for both modes.
11422 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11423 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11424 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11425 // in order to avoid the fps drop due to high quality
11426 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11427 } else {
11428 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11429 }
11430 }
11431 }
11432 LOGD("Final cacMode is %d", cacMode);
11433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11434 rc = BAD_VALUE;
11435 }
11436 } else {
11437 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11438 }
11439 }
11440
Thierry Strudel2896d122017-02-23 19:18:03 -080011441 char af_value[PROPERTY_VALUE_MAX];
11442 property_get("persist.camera.af.infinity", af_value, "0");
11443
Jason Lee84ae9972017-02-24 13:24:24 -080011444 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011445 if (atoi(af_value) == 0) {
11446 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011447 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011448 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11449 fwk_focusMode);
11450 if (NAME_NOT_FOUND != val) {
11451 uint8_t focusMode = (uint8_t)val;
11452 LOGD("set focus mode %d", focusMode);
11453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11454 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11455 rc = BAD_VALUE;
11456 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011457 }
11458 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011459 } else {
11460 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11461 LOGE("Focus forced to infinity %d", focusMode);
11462 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11463 rc = BAD_VALUE;
11464 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011465 }
11466
Jason Lee84ae9972017-02-24 13:24:24 -080011467 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11468 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011469 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11471 focalDistance)) {
11472 rc = BAD_VALUE;
11473 }
11474 }
11475
11476 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11477 uint8_t fwk_antibandingMode =
11478 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11479 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11480 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11481 if (NAME_NOT_FOUND != val) {
11482 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011483 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11484 if (m60HzZone) {
11485 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11486 } else {
11487 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11488 }
11489 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11491 hal_antibandingMode)) {
11492 rc = BAD_VALUE;
11493 }
11494 }
11495 }
11496
11497 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11498 int32_t expCompensation = frame_settings.find(
11499 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11500 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11501 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11502 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11503 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011504 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11506 expCompensation)) {
11507 rc = BAD_VALUE;
11508 }
11509 }
11510
11511 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11512 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11513 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11514 rc = BAD_VALUE;
11515 }
11516 }
11517 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11518 rc = setHalFpsRange(frame_settings, hal_metadata);
11519 if (rc != NO_ERROR) {
11520 LOGE("setHalFpsRange failed");
11521 }
11522 }
11523
11524 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11525 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11527 rc = BAD_VALUE;
11528 }
11529 }
11530
11531 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11532 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11533 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11534 fwk_effectMode);
11535 if (NAME_NOT_FOUND != val) {
11536 uint8_t effectMode = (uint8_t)val;
11537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11538 rc = BAD_VALUE;
11539 }
11540 }
11541 }
11542
11543 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11544 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11545 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11546 colorCorrectMode)) {
11547 rc = BAD_VALUE;
11548 }
11549 }
11550
11551 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11552 cam_color_correct_gains_t colorCorrectGains;
11553 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11554 colorCorrectGains.gains[i] =
11555 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11556 }
11557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11558 colorCorrectGains)) {
11559 rc = BAD_VALUE;
11560 }
11561 }
11562
11563 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11564 cam_color_correct_matrix_t colorCorrectTransform;
11565 cam_rational_type_t transform_elem;
11566 size_t num = 0;
11567 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11568 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11569 transform_elem.numerator =
11570 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11571 transform_elem.denominator =
11572 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11573 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11574 num++;
11575 }
11576 }
11577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11578 colorCorrectTransform)) {
11579 rc = BAD_VALUE;
11580 }
11581 }
11582
11583 cam_trigger_t aecTrigger;
11584 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11585 aecTrigger.trigger_id = -1;
11586 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11587 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11588 aecTrigger.trigger =
11589 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11590 aecTrigger.trigger_id =
11591 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11593 aecTrigger)) {
11594 rc = BAD_VALUE;
11595 }
11596 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11597 aecTrigger.trigger, aecTrigger.trigger_id);
11598 }
11599
11600 /*af_trigger must come with a trigger id*/
11601 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11602 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11603 cam_trigger_t af_trigger;
11604 af_trigger.trigger =
11605 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11606 af_trigger.trigger_id =
11607 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11609 rc = BAD_VALUE;
11610 }
11611 LOGD("AfTrigger: %d AfTriggerID: %d",
11612 af_trigger.trigger, af_trigger.trigger_id);
11613 }
11614
11615 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11616 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11618 rc = BAD_VALUE;
11619 }
11620 }
11621 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11622 cam_edge_application_t edge_application;
11623 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011624
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11626 edge_application.sharpness = 0;
11627 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011628 edge_application.sharpness =
11629 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11630 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11631 int32_t sharpness =
11632 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11633 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11634 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11635 LOGD("Setting edge mode sharpness %d", sharpness);
11636 edge_application.sharpness = sharpness;
11637 }
11638 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011639 }
11640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11641 rc = BAD_VALUE;
11642 }
11643 }
11644
11645 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11646 int32_t respectFlashMode = 1;
11647 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11648 uint8_t fwk_aeMode =
11649 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11650 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11651 respectFlashMode = 0;
11652 LOGH("AE Mode controls flash, ignore android.flash.mode");
11653 }
11654 }
11655 if (respectFlashMode) {
11656 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11657 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11658 LOGH("flash mode after mapping %d", val);
11659 // To check: CAM_INTF_META_FLASH_MODE usage
11660 if (NAME_NOT_FOUND != val) {
11661 uint8_t flashMode = (uint8_t)val;
11662 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11663 rc = BAD_VALUE;
11664 }
11665 }
11666 }
11667 }
11668
11669 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11670 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11672 rc = BAD_VALUE;
11673 }
11674 }
11675
11676 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11677 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11679 flashFiringTime)) {
11680 rc = BAD_VALUE;
11681 }
11682 }
11683
11684 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11685 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11687 hotPixelMode)) {
11688 rc = BAD_VALUE;
11689 }
11690 }
11691
11692 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11693 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11694 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11695 lensAperture)) {
11696 rc = BAD_VALUE;
11697 }
11698 }
11699
11700 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11701 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11703 filterDensity)) {
11704 rc = BAD_VALUE;
11705 }
11706 }
11707
11708 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11709 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11711 focalLength)) {
11712 rc = BAD_VALUE;
11713 }
11714 }
11715
11716 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11717 uint8_t optStabMode =
11718 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11720 optStabMode)) {
11721 rc = BAD_VALUE;
11722 }
11723 }
11724
11725 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11726 uint8_t videoStabMode =
11727 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11728 LOGD("videoStabMode from APP = %d", videoStabMode);
11729        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
11730 videoStabMode)) {
11731 rc = BAD_VALUE;
11732 }
11733 }
11734
11735
11736 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11737 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11739 noiseRedMode)) {
11740 rc = BAD_VALUE;
11741 }
11742 }
11743
11744 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11745 float reprocessEffectiveExposureFactor =
11746 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11748 reprocessEffectiveExposureFactor)) {
11749 rc = BAD_VALUE;
11750 }
11751 }
11752
11753 cam_crop_region_t scalerCropRegion;
11754 bool scalerCropSet = false;
11755 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11756 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11757 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11758 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11759 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11760
11761 // Map coordinate system from active array to sensor output.
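        // toSensor() rewrites the rectangle in place; the AE/AF region ROIs below go
        // through the same mapping so all coordinates stay in the sensor output space.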
11762 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11763 scalerCropRegion.width, scalerCropRegion.height);
11764
11765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11766 scalerCropRegion)) {
11767 rc = BAD_VALUE;
11768 }
11769 scalerCropSet = true;
11770 }
11771
11772 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11773 int64_t sensorExpTime =
11774 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11775 LOGD("setting sensorExpTime %lld", sensorExpTime);
11776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11777 sensorExpTime)) {
11778 rc = BAD_VALUE;
11779 }
11780 }
11781
11782 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11783 int64_t sensorFrameDuration =
11784 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011785 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11786 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11787 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11788 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11790 sensorFrameDuration)) {
11791 rc = BAD_VALUE;
11792 }
11793 }
11794
11795 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11796 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11797 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11798 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11799 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11800 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11801 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11803 sensorSensitivity)) {
11804 rc = BAD_VALUE;
11805 }
11806 }
11807
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011808#ifndef USE_HAL_3_3
11809 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11810 int32_t ispSensitivity =
11811 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11812 if (ispSensitivity <
11813 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11814 ispSensitivity =
11815 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11816 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11817 }
11818 if (ispSensitivity >
11819 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11820 ispSensitivity =
11821 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11822 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11823 }
11824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11825 ispSensitivity)) {
11826 rc = BAD_VALUE;
11827 }
11828 }
11829#endif
11830
Thierry Strudel3d639192016-09-09 11:52:26 -070011831 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11832 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11834 rc = BAD_VALUE;
11835 }
11836 }
11837
11838 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11839 uint8_t fwk_facedetectMode =
11840 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11841
11842 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11843 fwk_facedetectMode);
11844
11845 if (NAME_NOT_FOUND != val) {
11846 uint8_t facedetectMode = (uint8_t)val;
11847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11848 facedetectMode)) {
11849 rc = BAD_VALUE;
11850 }
11851 }
11852 }
11853
Thierry Strudel54dc9782017-02-15 12:12:10 -080011854 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011855 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011856 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11858 histogramMode)) {
11859 rc = BAD_VALUE;
11860 }
11861 }
11862
11863 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11864 uint8_t sharpnessMapMode =
11865 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11867 sharpnessMapMode)) {
11868 rc = BAD_VALUE;
11869 }
11870 }
11871
11872 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11873 uint8_t tonemapMode =
11874 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11876 rc = BAD_VALUE;
11877 }
11878 }
11879    /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
11880    /* All tonemap channels have the same number of points */
11881 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11882 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11883 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11884 cam_rgb_tonemap_curves tonemapCurves;
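        // Each framework tonemap curve is a flat float array of interleaved
        // (Pin, Pout) control-point pairs, so the point count is half the
        // metadata entry count computed below.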
11885 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11886 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11887 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11888 tonemapCurves.tonemap_points_cnt,
11889 CAM_MAX_TONEMAP_CURVE_SIZE);
11890 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11891 }
11892
11893 /* ch0 = G*/
11894 size_t point = 0;
11895 cam_tonemap_curve_t tonemapCurveGreen;
11896 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11897 for (size_t j = 0; j < 2; j++) {
11898 tonemapCurveGreen.tonemap_points[i][j] =
11899 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11900 point++;
11901 }
11902 }
11903 tonemapCurves.curves[0] = tonemapCurveGreen;
11904
11905 /* ch 1 = B */
11906 point = 0;
11907 cam_tonemap_curve_t tonemapCurveBlue;
11908 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11909 for (size_t j = 0; j < 2; j++) {
11910 tonemapCurveBlue.tonemap_points[i][j] =
11911 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11912 point++;
11913 }
11914 }
11915 tonemapCurves.curves[1] = tonemapCurveBlue;
11916
11917 /* ch 2 = R */
11918 point = 0;
11919 cam_tonemap_curve_t tonemapCurveRed;
11920 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11921 for (size_t j = 0; j < 2; j++) {
11922 tonemapCurveRed.tonemap_points[i][j] =
11923 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11924 point++;
11925 }
11926 }
11927 tonemapCurves.curves[2] = tonemapCurveRed;
11928
11929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11930 tonemapCurves)) {
11931 rc = BAD_VALUE;
11932 }
11933 }
11934
11935 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11936 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11938 captureIntent)) {
11939 rc = BAD_VALUE;
11940 }
11941 }
11942
11943 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11944 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11945 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11946 blackLevelLock)) {
11947 rc = BAD_VALUE;
11948 }
11949 }
11950
11951 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11952 uint8_t lensShadingMapMode =
11953 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11955 lensShadingMapMode)) {
11956 rc = BAD_VALUE;
11957 }
11958 }
11959
11960 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11961 cam_area_t roi;
11962 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011963 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011964
11965 // Map coordinate system from active array to sensor output.
11966 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11967 roi.rect.height);
11968
11969 if (scalerCropSet) {
11970 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11971 }
11972 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11978 cam_area_t roi;
11979 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011980 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011981
11982 // Map coordinate system from active array to sensor output.
11983 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11984 roi.rect.height);
11985
11986 if (scalerCropSet) {
11987 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11988 }
11989 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11990 rc = BAD_VALUE;
11991 }
11992 }
11993
11994 // CDS for non-HFR non-video mode
11995 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11996 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11997 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11998 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11999 LOGE("Invalid CDS mode %d!", *fwk_cds);
12000 } else {
12001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12002 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12003 rc = BAD_VALUE;
12004 }
12005 }
12006 }
12007
Thierry Strudel04e026f2016-10-10 11:27:36 -070012008 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012009 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012010 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012011 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12012 }
12013 if (m_bVideoHdrEnabled)
12014 vhdr = CAM_VIDEO_HDR_MODE_ON;
12015
Thierry Strudel54dc9782017-02-15 12:12:10 -080012016 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12017
12018 if(vhdr != curr_hdr_state)
12019 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12020
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012021 rc = setVideoHdrMode(mParameters, vhdr);
12022 if (rc != NO_ERROR) {
12023 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012024 }
12025
12026 //IR
12027 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12028 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12029 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012030 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12031 uint8_t isIRon = 0;
12032
12033        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012034 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12035 LOGE("Invalid IR mode %d!", fwk_ir);
12036 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012037 if(isIRon != curr_ir_state )
12038 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12039
Thierry Strudel04e026f2016-10-10 11:27:36 -070012040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12041 CAM_INTF_META_IR_MODE, fwk_ir)) {
12042 rc = BAD_VALUE;
12043 }
12044 }
12045 }
12046
Thierry Strudel54dc9782017-02-15 12:12:10 -080012047 //Binning Correction Mode
12048 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12049 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12050 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12051 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12052 || (0 > fwk_binning_correction)) {
12053 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12054 } else {
12055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12056 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060 }
12061
Thierry Strudel269c81a2016-10-12 12:13:59 -070012062 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12063 float aec_speed;
12064 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12065 LOGD("AEC Speed :%f", aec_speed);
12066 if ( aec_speed < 0 ) {
12067 LOGE("Invalid AEC mode %f!", aec_speed);
12068 } else {
12069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12070 aec_speed)) {
12071 rc = BAD_VALUE;
12072 }
12073 }
12074 }
12075
12076 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12077 float awb_speed;
12078 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12079 LOGD("AWB Speed :%f", awb_speed);
12080 if ( awb_speed < 0 ) {
12081 LOGE("Invalid AWB mode %f!", awb_speed);
12082 } else {
12083 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12084 awb_speed)) {
12085 rc = BAD_VALUE;
12086 }
12087 }
12088 }
12089
Thierry Strudel3d639192016-09-09 11:52:26 -070012090 // TNR
12091 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12092 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12093 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012094 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012095 cam_denoise_param_t tnr;
12096 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12097 tnr.process_plates =
12098 (cam_denoise_process_type_t)frame_settings.find(
12099 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12100 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012101
12102 if(b_TnrRequested != curr_tnr_state)
12103 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12104
Thierry Strudel3d639192016-09-09 11:52:26 -070012105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12106 rc = BAD_VALUE;
12107 }
12108 }
12109
Thierry Strudel54dc9782017-02-15 12:12:10 -080012110 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012111 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012112 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12114 *exposure_metering_mode)) {
12115 rc = BAD_VALUE;
12116 }
12117 }
12118
Thierry Strudel3d639192016-09-09 11:52:26 -070012119 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12120 int32_t fwk_testPatternMode =
12121 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12122 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12123 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12124
12125 if (NAME_NOT_FOUND != testPatternMode) {
12126 cam_test_pattern_data_t testPatternData;
12127 memset(&testPatternData, 0, sizeof(testPatternData));
12128 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12129 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12130 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12131 int32_t *fwk_testPatternData =
12132 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12133 testPatternData.r = fwk_testPatternData[0];
12134 testPatternData.b = fwk_testPatternData[3];
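                // Map the remaining framework values onto the sensor's green
                // channels according to its Bayer (CFA) color arrangement.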
12135 switch (gCamCapability[mCameraId]->color_arrangement) {
12136 case CAM_FILTER_ARRANGEMENT_RGGB:
12137 case CAM_FILTER_ARRANGEMENT_GRBG:
12138 testPatternData.gr = fwk_testPatternData[1];
12139 testPatternData.gb = fwk_testPatternData[2];
12140 break;
12141 case CAM_FILTER_ARRANGEMENT_GBRG:
12142 case CAM_FILTER_ARRANGEMENT_BGGR:
12143 testPatternData.gr = fwk_testPatternData[2];
12144 testPatternData.gb = fwk_testPatternData[1];
12145 break;
12146 default:
12147 LOGE("color arrangement %d is not supported",
12148 gCamCapability[mCameraId]->color_arrangement);
12149 break;
12150 }
12151 }
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12153 testPatternData)) {
12154 rc = BAD_VALUE;
12155 }
12156 } else {
12157 LOGE("Invalid framework sensor test pattern mode %d",
12158 fwk_testPatternMode);
12159 }
12160 }
12161
12162 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12163 size_t count = 0;
12164 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12165 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12166 gps_coords.data.d, gps_coords.count, count);
12167 if (gps_coords.count != count) {
12168 rc = BAD_VALUE;
12169 }
12170 }
12171
12172 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12173 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12174 size_t count = 0;
12175 const char *gps_methods_src = (const char *)
12176 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12177 memset(gps_methods, '\0', sizeof(gps_methods));
12178 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12179 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12180 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12181 if (GPS_PROCESSING_METHOD_SIZE != count) {
12182 rc = BAD_VALUE;
12183 }
12184 }
12185
12186 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12187 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12189 gps_timestamp)) {
12190 rc = BAD_VALUE;
12191 }
12192 }
12193
12194 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12195 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12196 cam_rotation_info_t rotation_info;
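        // android.jpeg.orientation is restricted to 0/90/180/270 by the
        // framework, so exactly one of the branches below applies.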
12197 if (orientation == 0) {
12198 rotation_info.rotation = ROTATE_0;
12199 } else if (orientation == 90) {
12200 rotation_info.rotation = ROTATE_90;
12201 } else if (orientation == 180) {
12202 rotation_info.rotation = ROTATE_180;
12203 } else if (orientation == 270) {
12204 rotation_info.rotation = ROTATE_270;
12205 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012206 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012207 rotation_info.streamId = snapshotStreamId;
12208 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12210 rc = BAD_VALUE;
12211 }
12212 }
12213
12214 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12215 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12217 rc = BAD_VALUE;
12218 }
12219 }
12220
12221 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12222 uint32_t thumb_quality = (uint32_t)
12223 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12225 thumb_quality)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12231 cam_dimension_t dim;
12232 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12233 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12235 rc = BAD_VALUE;
12236 }
12237 }
12238
12239 // Internal metadata
12240 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12241 size_t count = 0;
12242 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12243 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12244 privatedata.data.i32, privatedata.count, count);
12245 if (privatedata.count != count) {
12246 rc = BAD_VALUE;
12247 }
12248 }
12249
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012250 // ISO/Exposure Priority
12251 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12252 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12253 cam_priority_mode_t mode =
12254 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12255 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12256 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12257 use_iso_exp_pty.previewOnly = FALSE;
12258 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12259 use_iso_exp_pty.value = *ptr;
12260
12261 if(CAM_ISO_PRIORITY == mode) {
12262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12263 use_iso_exp_pty)) {
12264 rc = BAD_VALUE;
12265 }
12266 }
12267 else {
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12269 use_iso_exp_pty)) {
12270 rc = BAD_VALUE;
12271 }
12272 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012273
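            // ZSL is enabled alongside ISO/exposure priority; when neither
            // priority tag is present (outer else below), ZSL is turned off.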
12274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278 } else {
12279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12280 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012281 }
12282 }
12283
12284 // Saturation
12285 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12286 int32_t* use_saturation =
12287 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12289 rc = BAD_VALUE;
12290 }
12291 }
12292
Thierry Strudel3d639192016-09-09 11:52:26 -070012293 // EV step
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12295 gCamCapability[mCameraId]->exp_compensation_step)) {
12296 rc = BAD_VALUE;
12297 }
12298
12299 // CDS info
12300 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12301 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12302 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12303
12304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12305 CAM_INTF_META_CDS_DATA, *cdsData)) {
12306 rc = BAD_VALUE;
12307 }
12308 }
12309
Shuzhen Wang19463d72016-03-08 11:09:52 -080012310 // Hybrid AE
12311 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12312 uint8_t *hybrid_ae = (uint8_t *)
12313 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12314
12315 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12316 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12317 rc = BAD_VALUE;
12318 }
12319 }
12320
Shuzhen Wang14415f52016-11-16 18:26:18 -080012321 // Histogram
12322 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12323 uint8_t histogramMode =
12324 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12326 histogramMode)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12332 int32_t histogramBins =
12333 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12335 histogramBins)) {
12336 rc = BAD_VALUE;
12337 }
12338 }
12339
Thierry Strudel3d639192016-09-09 11:52:26 -070012340 return rc;
12341}
12342
12343/*===========================================================================
12344 * FUNCTION : captureResultCb
12345 *
12346 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12347 *
12348 * PARAMETERS :
12349 * @metadata : metadata super buffer from mm-camera-interface
12350 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12351 * @userdata: userdata
12352 *
12353 * RETURN : NONE
12354 *==========================================================================*/
12355void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12356 camera3_stream_buffer_t *buffer,
12357 uint32_t frame_number, bool isInputBuffer, void *userdata)
12358{
12359 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12360 if (hw == NULL) {
12361 LOGE("Invalid hw %p", hw);
12362 return;
12363 }
12364
12365 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12366 return;
12367}
12368
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012369/*===========================================================================
12370 * FUNCTION : setBufferErrorStatus
12371 *
12372 * DESCRIPTION: Callback handler for channels to report any buffer errors
12373 *
12374 * PARAMETERS :
12375 * @ch : Channel on which buffer error is reported from
12376 * @frame_number : frame number on which buffer error is reported on
12377 * @buffer_status : buffer error status
12378 * @userdata: userdata
12379 *
12380 * RETURN : NONE
12381 *==========================================================================*/
12382void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12383 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12384{
12385 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12386 if (hw == NULL) {
12387 LOGE("Invalid hw %p", hw);
12388 return;
12389 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012390
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012391 hw->setBufferErrorStatus(ch, frame_number, err);
12392 return;
12393}
12394
12395void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12396 uint32_t frameNumber, camera3_buffer_status_t err)
12397{
12398 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12399 pthread_mutex_lock(&mMutex);
12400
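    // Mark every pending buffer of this channel and frame as errored so it is
    // later returned to the framework with CAMERA3_BUFFER_STATUS_ERROR.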
12401 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12402 if (req.frame_number != frameNumber)
12403 continue;
12404 for (auto& k : req.mPendingBufferList) {
12405 if(k.stream->priv == ch) {
12406 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12407 }
12408 }
12409 }
12410
12411 pthread_mutex_unlock(&mMutex);
12412 return;
12413}
Thierry Strudel3d639192016-09-09 11:52:26 -070012414/*===========================================================================
12415 * FUNCTION : initialize
12416 *
12417 * DESCRIPTION: Pass framework callback pointers to HAL
12418 *
12419 * PARAMETERS :
12420 * @device : camera3 device handle
12421 * @callback_ops : framework callback function pointers
12422 * RETURN : Success : 0
12423 * Failure: -ENODEV
12424 *==========================================================================*/
12425
12426int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12427 const camera3_callback_ops_t *callback_ops)
12428{
12429 LOGD("E");
12430 QCamera3HardwareInterface *hw =
12431 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12432 if (!hw) {
12433 LOGE("NULL camera device");
12434 return -ENODEV;
12435 }
12436
12437 int rc = hw->initialize(callback_ops);
12438 LOGD("X");
12439 return rc;
12440}
12441
12442/*===========================================================================
12443 * FUNCTION : configure_streams
12444 *
12445 * DESCRIPTION: Entry point for the framework to configure a set of streams
12446 * PARAMETERS :
12447 * @device : camera3 device handle
12448 * @stream_list : stream configuration requested by the framework
12449 *
12450 * RETURN : Success: 0
12451 * Failure: -EINVAL (if stream configuration is invalid)
12452 * -ENODEV (fatal error)
12453 *==========================================================================*/
12454
12455int QCamera3HardwareInterface::configure_streams(
12456 const struct camera3_device *device,
12457 camera3_stream_configuration_t *stream_list)
12458{
12459 LOGD("E");
12460 QCamera3HardwareInterface *hw =
12461 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12462 if (!hw) {
12463 LOGE("NULL camera device");
12464 return -ENODEV;
12465 }
12466 int rc = hw->configureStreams(stream_list);
12467 LOGD("X");
12468 return rc;
12469}
12470
12471/*===========================================================================
12472 * FUNCTION : construct_default_request_settings
12473 *
12474 * DESCRIPTION: Configure a settings buffer to meet the required use case
12475 *
12476 * PARAMETERS :
12477 *
12478 *
12479 * RETURN : Success: Return valid metadata
12480 * Failure: Return NULL
12481 *==========================================================================*/
12482const camera_metadata_t* QCamera3HardwareInterface::
12483 construct_default_request_settings(const struct camera3_device *device,
12484 int type)
12485{
12486
12487 LOGD("E");
12488 camera_metadata_t* fwk_metadata = NULL;
12489 QCamera3HardwareInterface *hw =
12490 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12491 if (!hw) {
12492 LOGE("NULL camera device");
12493 return NULL;
12494 }
12495
12496 fwk_metadata = hw->translateCapabilityToMetadata(type);
12497
12498 LOGD("X");
12499 return fwk_metadata;
12500}
12501
12502/*===========================================================================
12503 * FUNCTION : process_capture_request
12504 *
12505 * DESCRIPTION: Entry point for the framework to submit a capture request
12506 * PARAMETERS :
12507 * @device : camera3 device handle
12508 * @request : capture request to be processed
12509 *
12510 * RETURN : 0 on success, negative error code on failure
12511 *==========================================================================*/
12512int QCamera3HardwareInterface::process_capture_request(
12513 const struct camera3_device *device,
12514 camera3_capture_request_t *request)
12515{
12516 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012517 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012518 QCamera3HardwareInterface *hw =
12519 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12520 if (!hw) {
12521 LOGE("NULL camera device");
12522 return -EINVAL;
12523 }
12524
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012525 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012526 LOGD("X");
12527 return rc;
12528}
12529
12530/*===========================================================================
12531 * FUNCTION : dump
12532 *
12533 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
12534 * PARAMETERS :
12535 * @device : camera3 device handle
12536 * @fd : file descriptor to dump into
12537 *
12538 * RETURN : None
12539 *==========================================================================*/
12540
12541void QCamera3HardwareInterface::dump(
12542 const struct camera3_device *device, int fd)
12543{
12544 /* Log level property is read when "adb shell dumpsys media.camera" is
12545 called so that the log level can be controlled without restarting
12546 the media server */
12547 getLogLevel();
12548
12549 LOGD("E");
12550 QCamera3HardwareInterface *hw =
12551 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12552 if (!hw) {
12553 LOGE("NULL camera device");
12554 return;
12555 }
12556
12557 hw->dump(fd);
12558 LOGD("X");
12559 return;
12560}
12561
12562/*===========================================================================
12563 * FUNCTION : flush
12564 *
12565 * DESCRIPTION: Flush all in-flight captures and return pending buffers and results
12566 * PARAMETERS :
12567 * @device : camera3 device handle
12568 *
12569 * RETURN : 0 on success
12570 * Error code on failure (-ENODEV for a fatal device error)
12571 *==========================================================================*/
12572
12573int QCamera3HardwareInterface::flush(
12574 const struct camera3_device *device)
12575{
12576 int rc;
12577 LOGD("E");
12578 QCamera3HardwareInterface *hw =
12579 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12580 if (!hw) {
12581 LOGE("NULL camera device");
12582 return -EINVAL;
12583 }
12584
12585 pthread_mutex_lock(&hw->mMutex);
12586 // Validate current state
12587 switch (hw->mState) {
12588 case STARTED:
12589 /* valid state */
12590 break;
12591
12592 case ERROR:
12593 pthread_mutex_unlock(&hw->mMutex);
12594 hw->handleCameraDeviceError();
12595 return -ENODEV;
12596
12597 default:
12598 LOGI("Flush returned during state %d", hw->mState);
12599 pthread_mutex_unlock(&hw->mMutex);
12600 return 0;
12601 }
12602 pthread_mutex_unlock(&hw->mMutex);
12603
12604 rc = hw->flush(true /* restart channels */ );
12605 LOGD("X");
12606 return rc;
12607}
12608
12609/*===========================================================================
12610 * FUNCTION : close_camera_device
12611 *
12612 * DESCRIPTION: Close the camera device and free the HAL instance
12613 * PARAMETERS :
12614 * @device : hw device handle of the camera to be closed
12615 *
12616 * RETURN : NO_ERROR on success
12617 * BAD_VALUE if device handle is invalid
12618 *==========================================================================*/
12619int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12620{
12621 int ret = NO_ERROR;
12622 QCamera3HardwareInterface *hw =
12623 reinterpret_cast<QCamera3HardwareInterface *>(
12624 reinterpret_cast<camera3_device_t *>(device)->priv);
12625 if (!hw) {
12626 LOGE("NULL camera device");
12627 return BAD_VALUE;
12628 }
12629
12630 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12631 delete hw;
12632 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012633 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012634 return ret;
12635}
12636
12637/*===========================================================================
12638 * FUNCTION : getWaveletDenoiseProcessPlate
12639 *
12640 * DESCRIPTION: query wavelet denoise process plate
12641 *
12642 * PARAMETERS : None
12643 *
12644 * RETURN : WNR process plate value
12645 *==========================================================================*/
12646cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12647{
12648 char prop[PROPERTY_VALUE_MAX];
12649 memset(prop, 0, sizeof(prop));
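    // Plate selection comes from a system property, e.g.
    // "adb shell setprop persist.denoise.process.plates 2" picks streamlined YCbCr.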
12650 property_get("persist.denoise.process.plates", prop, "0");
12651 int processPlate = atoi(prop);
12652 switch(processPlate) {
12653 case 0:
12654 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12655 case 1:
12656 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12657 case 2:
12658 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12659 case 3:
12660 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12661 default:
12662 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12663 }
12664}
12665
12666
12667/*===========================================================================
12668 * FUNCTION : getTemporalDenoiseProcessPlate
12669 *
12670 * DESCRIPTION: query temporal denoise process plate
12671 *
12672 * PARAMETERS : None
12673 *
12674 * RETURN : TNR process plate value
12675 *==========================================================================*/
12676cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12677{
12678 char prop[PROPERTY_VALUE_MAX];
12679 memset(prop, 0, sizeof(prop));
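    // Selected via the persist.tnr.process.plates property; the value mapping
    // matches the wavelet denoise plate selection above.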
12680 property_get("persist.tnr.process.plates", prop, "0");
12681 int processPlate = atoi(prop);
12682 switch(processPlate) {
12683 case 0:
12684 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12685 case 1:
12686 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12687 case 2:
12688 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12689 case 3:
12690 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12691 default:
12692 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12693 }
12694}
12695
12696
12697/*===========================================================================
12698 * FUNCTION : extractSceneMode
12699 *
12700 * DESCRIPTION: Extract scene mode from frameworks set metadata
12701 *
12702 * PARAMETERS :
12703 * @frame_settings: CameraMetadata reference
12704 * @metaMode: ANDROID_CONTROL_MODE
12705 * @hal_metadata: hal metadata structure
12706 *
12707 * RETURN : None
12708 *==========================================================================*/
12709int32_t QCamera3HardwareInterface::extractSceneMode(
12710 const CameraMetadata &frame_settings, uint8_t metaMode,
12711 metadata_buffer_t *hal_metadata)
12712{
12713 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012714 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12715
12716 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12717 LOGD("Ignoring control mode OFF_KEEP_STATE");
12718 return NO_ERROR;
12719 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012720
12721 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12722 camera_metadata_ro_entry entry =
12723 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12724 if (0 == entry.count)
12725 return rc;
12726
12727 uint8_t fwk_sceneMode = entry.data.u8[0];
12728
12729 int val = lookupHalName(SCENE_MODES_MAP,
12730 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12731 fwk_sceneMode);
12732 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012733 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012734 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012735 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012736 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012737
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012738 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12739 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12740 }
12741
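    // Sensor HDR takes precedence; only when it is not enabled do we fall back
    // to multi-frame HDR bracketing for the HDR scene mode.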
12742 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12743        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012744 cam_hdr_param_t hdr_params;
12745 hdr_params.hdr_enable = 1;
12746 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12747 hdr_params.hdr_need_1x = false;
12748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12749 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12750 rc = BAD_VALUE;
12751 }
12752 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012753
Thierry Strudel3d639192016-09-09 11:52:26 -070012754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12755 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12756 rc = BAD_VALUE;
12757 }
12758 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012759
12760 if (mForceHdrSnapshot) {
12761 cam_hdr_param_t hdr_params;
12762 hdr_params.hdr_enable = 1;
12763 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12764 hdr_params.hdr_need_1x = false;
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12766 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770
Thierry Strudel3d639192016-09-09 11:52:26 -070012771 return rc;
12772}
12773
12774/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012775 * FUNCTION : setVideoHdrMode
12776 *
12777 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12778 *
12779 * PARAMETERS :
12780 * @hal_metadata: hal metadata structure
12781 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
12782 *
12783 * RETURN : NO_ERROR on success, BAD_VALUE on invalid mode
12784 *==========================================================================*/
12785int32_t QCamera3HardwareInterface::setVideoHdrMode(
12786 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12787{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012788 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12789 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12790 }
12791
12792 LOGE("Invalid Video HDR mode %d!", vhdr);
12793 return BAD_VALUE;
12794}
12795
12796/*===========================================================================
12797 * FUNCTION : setSensorHDR
12798 *
12799 * DESCRIPTION: Enable/disable sensor HDR.
12800 *
12801 * PARAMETERS :
12802 * @hal_metadata: hal metadata structure
12803 * @enable: boolean whether to enable/disable sensor HDR
12804 *
12805 * RETURN : NO_ERROR on success, BAD_VALUE on failure
12806 *==========================================================================*/
12807int32_t QCamera3HardwareInterface::setSensorHDR(
12808 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12809{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012810 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012811 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12812
12813 if (enable) {
12814 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12815 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
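        // persist.camera.sensor.hdr selects which cam_sensor_hdr_type_t to use
        // when HDR is requested, e.g. "adb shell setprop persist.camera.sensor.hdr 3"
        // picks staggered HDR (the _LE_CAMERA_ build default below).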
12816 #ifdef _LE_CAMERA_
12817 //Default to staggered HDR for IOT
12818 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12819 #else
12820 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12821 #endif
12822 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12823 }
12824
12825 bool isSupported = false;
12826 switch (sensor_hdr) {
12827 case CAM_SENSOR_HDR_IN_SENSOR:
12828 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12829 CAM_QCOM_FEATURE_SENSOR_HDR) {
12830 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012831 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012832 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012833 break;
12834 case CAM_SENSOR_HDR_ZIGZAG:
12835 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12836 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12837 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012838 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012839 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012840 break;
12841 case CAM_SENSOR_HDR_STAGGERED:
12842 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12843 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12844 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012845 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012846 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012847 break;
12848 case CAM_SENSOR_HDR_OFF:
12849 isSupported = true;
12850 LOGD("Turning off sensor HDR");
12851 break;
12852 default:
12853 LOGE("HDR mode %d not supported", sensor_hdr);
12854 rc = BAD_VALUE;
12855 break;
12856 }
12857
12858 if(isSupported) {
12859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12860 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12861 rc = BAD_VALUE;
12862 } else {
12863 if(!isVideoHdrEnable)
12864 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012865 }
12866 }
12867 return rc;
12868}
12869
12870/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012871 * FUNCTION : needRotationReprocess
12872 *
12873 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12874 *
12875 * PARAMETERS : none
12876 *
12877 * RETURN : true: needed
12878 * false: no need
12879 *==========================================================================*/
12880bool QCamera3HardwareInterface::needRotationReprocess()
12881{
12882 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12883        // pp has the capability to process rotation, so route it through reprocess
12884 LOGH("need do reprocess for rotation");
12885 return true;
12886 }
12887
12888 return false;
12889}
12890
12891/*===========================================================================
12892 * FUNCTION : needReprocess
12893 *
12894 * DESCRIPTION: if reprocess is needed
12895 *
12896 * PARAMETERS : none
12897 *
12898 * RETURN : true: needed
12899 * false: no need
12900 *==========================================================================*/
12901bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12902{
12903 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12904 // TODO: add for ZSL HDR later
12905 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12906 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12907 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12908 return true;
12909 } else {
12910 LOGH("already post processed frame");
12911 return false;
12912 }
12913 }
12914 return needRotationReprocess();
12915}
12916
12917/*===========================================================================
12918 * FUNCTION : needJpegExifRotation
12919 *
12920 * DESCRIPTION: if rotation from jpeg is needed
12921 *
12922 * PARAMETERS : none
12923 *
12924 * RETURN : true: needed
12925 * false: no need
12926 *==========================================================================*/
12927bool QCamera3HardwareInterface::needJpegExifRotation()
12928{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012929 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012930 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12931 LOGD("Need use Jpeg EXIF Rotation");
12932 return true;
12933 }
12934 return false;
12935}
12936
12937/*===========================================================================
12938 * FUNCTION : addOfflineReprocChannel
12939 *
12940 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12941 * coming from input channel
12942 *
12943 * PARAMETERS :
12944 * @config : reprocess configuration
12945 * @inputChHandle : pointer to the input (source) channel
12946 *
12947 *
12948 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12949 *==========================================================================*/
12950QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12951 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12952{
12953 int32_t rc = NO_ERROR;
12954 QCamera3ReprocessChannel *pChannel = NULL;
12955
12956 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012957 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12958 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012959 if (NULL == pChannel) {
12960 LOGE("no mem for reprocess channel");
12961 return NULL;
12962 }
12963
12964 rc = pChannel->initialize(IS_TYPE_NONE);
12965 if (rc != NO_ERROR) {
12966 LOGE("init reprocess channel failed, ret = %d", rc);
12967 delete pChannel;
12968 return NULL;
12969 }
12970
12971 // pp feature config
12972 cam_pp_feature_config_t pp_config;
12973 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12974
12975 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12976 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12977 & CAM_QCOM_FEATURE_DSDN) {
12978        //Use CPP DSDN instead of CDS when the hardware supports it.
12979 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12980 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12981 }
12982 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12983 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12984 }
12985
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012986 if (config.hdr_param.hdr_enable) {
12987 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12988 pp_config.hdr_param = config.hdr_param;
12989 }
12990
12991 if (mForceHdrSnapshot) {
12992 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12993 pp_config.hdr_param.hdr_enable = 1;
12994 pp_config.hdr_param.hdr_need_1x = 0;
12995 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12996 }
12997
Thierry Strudel3d639192016-09-09 11:52:26 -070012998 rc = pChannel->addReprocStreamsFromSource(pp_config,
12999 config,
13000 IS_TYPE_NONE,
13001 mMetadataChannel);
13002
13003 if (rc != NO_ERROR) {
13004 delete pChannel;
13005 return NULL;
13006 }
13007 return pChannel;
13008}
13009
13010/*===========================================================================
13011 * FUNCTION : getMobicatMask
13012 *
13013 * DESCRIPTION: returns mobicat mask
13014 *
13015 * PARAMETERS : none
13016 *
13017 * RETURN : mobicat mask
13018 *
13019 *==========================================================================*/
13020uint8_t QCamera3HardwareInterface::getMobicatMask()
13021{
13022 return m_MobicatMask;
13023}
13024
13025/*===========================================================================
13026 * FUNCTION : setMobicat
13027 *
13028 * DESCRIPTION: set Mobicat on/off.
13029 *
13030 * PARAMETERS :
13031 * @params : none
13032 *
13033 * RETURN : int32_t type of status
13034 * NO_ERROR -- success
13035 * none-zero failure code
13036 *==========================================================================*/
13037int32_t QCamera3HardwareInterface::setMobicat()
13038{
13039 char value [PROPERTY_VALUE_MAX];
13040 property_get("persist.camera.mobicat", value, "0");
13041 int32_t ret = NO_ERROR;
13042 uint8_t enableMobi = (uint8_t)atoi(value);
13043
13044 if (enableMobi) {
13045 tune_cmd_t tune_cmd;
13046 tune_cmd.type = SET_RELOAD_CHROMATIX;
13047 tune_cmd.module = MODULE_ALL;
13048 tune_cmd.value = TRUE;
13049 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13050 CAM_INTF_PARM_SET_VFE_COMMAND,
13051 tune_cmd);
13052
13053 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13054 CAM_INTF_PARM_SET_PP_COMMAND,
13055 tune_cmd);
13056 }
13057 m_MobicatMask = enableMobi;
13058
13059 return ret;
13060}
13061
13062/*===========================================================================
13063* FUNCTION : getLogLevel
13064*
13065* DESCRIPTION: Reads the log level property into a variable
13066*
13067* PARAMETERS :
13068* None
13069*
13070* RETURN :
13071* None
13072*==========================================================================*/
13073void QCamera3HardwareInterface::getLogLevel()
13074{
13075 char prop[PROPERTY_VALUE_MAX];
13076 uint32_t globalLogLevel = 0;
13077
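    // Verbosity can be raised at runtime without rebuilding, e.g.
    // "adb shell setprop persist.camera.hal.debug <level>"; the higher of the
    // HAL and global levels takes effect.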
13078 property_get("persist.camera.hal.debug", prop, "0");
13079 int val = atoi(prop);
13080 if (0 <= val) {
13081 gCamHal3LogLevel = (uint32_t)val;
13082 }
13083
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013084 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013085 gKpiDebugLevel = atoi(prop);
13086
13087 property_get("persist.camera.global.debug", prop, "0");
13088 val = atoi(prop);
13089 if (0 <= val) {
13090 globalLogLevel = (uint32_t)val;
13091 }
13092
13093 /* Highest log level among hal.logs and global.logs is selected */
13094 if (gCamHal3LogLevel < globalLogLevel)
13095 gCamHal3LogLevel = globalLogLevel;
13096
13097 return;
13098}
13099
13100/*===========================================================================
13101 * FUNCTION : validateStreamRotations
13102 *
13103 * DESCRIPTION: Check if the rotations requested are supported
13104 *
13105 * PARAMETERS :
13106 * @stream_list : streams to be configured
13107 *
13108 * RETURN : NO_ERROR on success
13109 * -EINVAL on failure
13110 *
13111 *==========================================================================*/
13112int QCamera3HardwareInterface::validateStreamRotations(
13113 camera3_stream_configuration_t *streamList)
13114{
13115 int rc = NO_ERROR;
13116
13117 /*
13118 * Loop through all streams requested in configuration
13119 * Check if unsupported rotations have been requested on any of them
13120 */
13121 for (size_t j = 0; j < streamList->num_streams; j++){
13122 camera3_stream_t *newStream = streamList->streams[j];
13123
13124 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13125 bool isImplDef = (newStream->format ==
13126 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13127 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13128 isImplDef);
13129
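        // Rotation is only honoured on implementation-defined output streams;
        // ZSL (bidirectional) streams must not request it.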
13130 if (isRotated && (!isImplDef || isZsl)) {
13131 LOGE("Error: Unsupported rotation of %d requested for stream"
13132 "type:%d and stream format:%d",
13133 newStream->rotation, newStream->stream_type,
13134 newStream->format);
13135 rc = -EINVAL;
13136 break;
13137 }
13138 }
13139
13140 return rc;
13141}
13142
13143/*===========================================================================
13144* FUNCTION : getFlashInfo
13145*
13146* DESCRIPTION: Retrieve information about whether the device has a flash.
13147*
13148* PARAMETERS :
13149* @cameraId : Camera id to query
13150* @hasFlash : Boolean indicating whether there is a flash device
13151* associated with given camera
13152* @flashNode : If a flash device exists, this will be its device node.
13153*
13154* RETURN :
13155* None
13156*==========================================================================*/
13157void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13158 bool& hasFlash,
13159 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13160{
13161 cam_capability_t* camCapability = gCamCapability[cameraId];
13162 if (NULL == camCapability) {
13163 hasFlash = false;
13164 flashNode[0] = '\0';
13165 } else {
13166 hasFlash = camCapability->flash_available;
13167 strlcpy(flashNode,
13168 (char*)camCapability->flash_dev_name,
13169 QCAMERA_MAX_FILEPATH_LENGTH);
13170 }
13171}
13172
13173/*===========================================================================
13174* FUNCTION : getEepromVersionInfo
13175*
13176* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13177*
13178* PARAMETERS : None
13179*
13180* RETURN : string describing EEPROM version
13181* "\0" if no such info available
13182*==========================================================================*/
13183const char *QCamera3HardwareInterface::getEepromVersionInfo()
13184{
13185 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13186}
13187
13188/*===========================================================================
13189* FUNCTION : getLdafCalib
13190*
13191* DESCRIPTION: Retrieve Laser AF calibration data
13192*
13193* PARAMETERS : None
13194*
13195* RETURN : Two uint32_t describing laser AF calibration data
13196* NULL if none is available.
13197*==========================================================================*/
13198const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13199{
13200 if (mLdafCalibExist) {
13201 return &mLdafCalib[0];
13202 } else {
13203 return NULL;
13204 }
13205}
13206
13207/*===========================================================================
13208 * FUNCTION : dynamicUpdateMetaStreamInfo
13209 *
13210 * DESCRIPTION: This function:
13211 * (1) stops all the channels
13212 * (2) returns error on pending requests and buffers
13213 * (3) sends metastream_info in setparams
13214 * (4) starts all channels
13215 * This is useful when sensor has to be restarted to apply any
13216 * settings such as frame rate from a different sensor mode
13217 *
13218 * PARAMETERS : None
13219 *
13220 * RETURN : NO_ERROR on success
13221 * Error codes on failure
13222 *
13223 *==========================================================================*/
13224int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13225{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013226 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013227 int rc = NO_ERROR;
13228
13229 LOGD("E");
13230
13231 rc = stopAllChannels();
13232 if (rc < 0) {
13233 LOGE("stopAllChannels failed");
13234 return rc;
13235 }
13236
13237 rc = notifyErrorForPendingRequests();
13238 if (rc < 0) {
13239 LOGE("notifyErrorForPendingRequests failed");
13240 return rc;
13241 }
13242
13243 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13244 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13245 "Format:%d",
13246 mStreamConfigInfo.type[i],
13247 mStreamConfigInfo.stream_sizes[i].width,
13248 mStreamConfigInfo.stream_sizes[i].height,
13249 mStreamConfigInfo.postprocess_mask[i],
13250 mStreamConfigInfo.format[i]);
13251 }
13252
13253 /* Send meta stream info once again so that ISP can start */
13254 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13255 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13256 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13257 mParameters);
13258 if (rc < 0) {
13259 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13260 }
13261
13262 rc = startAllChannels();
13263 if (rc < 0) {
13264 LOGE("startAllChannels failed");
13265 return rc;
13266 }
13267
13268 LOGD("X");
13269 return rc;
13270}
13271
13272/*===========================================================================
13273 * FUNCTION : stopAllChannels
13274 *
13275 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13276 *
13277 * PARAMETERS : None
13278 *
13279 * RETURN : NO_ERROR on success
13280 * Error codes on failure
13281 *
13282 *==========================================================================*/
13283int32_t QCamera3HardwareInterface::stopAllChannels()
13284{
13285 int32_t rc = NO_ERROR;
13286
13287 LOGD("Stopping all channels");
13288 // Stop the Streams/Channels
13289 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13290 it != mStreamInfo.end(); it++) {
13291 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13292 if (channel) {
13293 channel->stop();
13294 }
13295 (*it)->status = INVALID;
13296 }
13297
13298 if (mSupportChannel) {
13299 mSupportChannel->stop();
13300 }
13301 if (mAnalysisChannel) {
13302 mAnalysisChannel->stop();
13303 }
13304 if (mRawDumpChannel) {
13305 mRawDumpChannel->stop();
13306 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013307 if (mHdrPlusRawSrcChannel) {
13308 mHdrPlusRawSrcChannel->stop();
13309 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013310 if (mMetadataChannel) {
13311 /* If content of mStreamInfo is not 0, there is metadata stream */
13312 mMetadataChannel->stop();
13313 }
13314
13315 LOGD("All channels stopped");
13316 return rc;
13317}
13318
13319/*===========================================================================
13320 * FUNCTION : startAllChannels
13321 *
13322 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13323 *
13324 * PARAMETERS : None
13325 *
13326 * RETURN : NO_ERROR on success
13327 * Error codes on failure
13328 *
13329 *==========================================================================*/
13330int32_t QCamera3HardwareInterface::startAllChannels()
13331{
13332 int32_t rc = NO_ERROR;
13333
13334 LOGD("Start all channels ");
13335 // Start the Streams/Channels
13336 if (mMetadataChannel) {
13337 /* If content of mStreamInfo is not 0, there is metadata stream */
13338 rc = mMetadataChannel->start();
13339 if (rc < 0) {
13340 LOGE("META channel start failed");
13341 return rc;
13342 }
13343 }
13344 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13345 it != mStreamInfo.end(); it++) {
13346 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13347 if (channel) {
13348 rc = channel->start();
13349 if (rc < 0) {
13350 LOGE("channel start failed");
13351 return rc;
13352 }
13353 }
13354 }
13355 if (mAnalysisChannel) {
13356 mAnalysisChannel->start();
13357 }
13358 if (mSupportChannel) {
13359 rc = mSupportChannel->start();
13360 if (rc < 0) {
13361 LOGE("Support channel start failed");
13362 return rc;
13363 }
13364 }
13365 if (mRawDumpChannel) {
13366 rc = mRawDumpChannel->start();
13367 if (rc < 0) {
13368 LOGE("RAW dump channel start failed");
13369 return rc;
13370 }
13371 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013372 if (mHdrPlusRawSrcChannel) {
13373 rc = mHdrPlusRawSrcChannel->start();
13374 if (rc < 0) {
13375 LOGE("HDR+ RAW channel start failed");
13376 return rc;
13377 }
13378 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013379
13380 LOGD("All channels started");
13381 return rc;
13382}
13383
13384/*===========================================================================
13385 * FUNCTION : notifyErrorForPendingRequests
13386 *
13387 * DESCRIPTION: This function sends error for all the pending requests/buffers
13388 *
13389 * PARAMETERS : None
13390 *
13391 * RETURN : Error codes
13392 * NO_ERROR on success
13393 *
13394 *==========================================================================*/
13395int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13396{
13397 int32_t rc = NO_ERROR;
13398 unsigned int frameNum = 0;
13399 camera3_capture_result_t result;
13400 camera3_stream_buffer_t *pStream_Buf = NULL;
13401
13402 memset(&result, 0, sizeof(camera3_capture_result_t));
13403
13404 if (mPendingRequestsList.size() > 0) {
13405 pendingRequestIterator i = mPendingRequestsList.begin();
13406 frameNum = i->frame_number;
13407 } else {
13408 /* There might still be pending buffers even though there are
13409 no pending requests. Setting the frameNum to MAX so that
13410 all the buffers with smaller frame numbers are returned */
13411 frameNum = UINT_MAX;
13412 }
13413
13414 LOGH("Oldest frame num on mPendingRequestsList = %u",
13415 frameNum);
13416
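    // Requests older than the oldest pending request have already had their
    // result metadata delivered, so only their buffers are flagged with
    // ERROR_BUFFER; newer requests are failed with a full ERROR_REQUEST below.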
Emilian Peev7650c122017-01-19 08:24:33 -080013417 notifyErrorFoPendingDepthData(mDepthChannel);
13418
Thierry Strudel3d639192016-09-09 11:52:26 -070013419 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13420 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13421
13422 if (req->frame_number < frameNum) {
13423 // Send Error notify to frameworks for each buffer for which
13424 // metadata buffer is already sent
13425 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13426 req->frame_number, req->mPendingBufferList.size());
13427
13428 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13429 if (NULL == pStream_Buf) {
13430 LOGE("No memory for pending buffers array");
13431 return NO_MEMORY;
13432 }
13433 memset(pStream_Buf, 0,
13434 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13435 result.result = NULL;
13436 result.frame_number = req->frame_number;
13437 result.num_output_buffers = req->mPendingBufferList.size();
13438 result.output_buffers = pStream_Buf;
13439
13440 size_t index = 0;
13441 for (auto info = req->mPendingBufferList.begin();
13442 info != req->mPendingBufferList.end(); ) {
13443
13444 camera3_notify_msg_t notify_msg;
13445 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13446 notify_msg.type = CAMERA3_MSG_ERROR;
13447 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13448 notify_msg.message.error.error_stream = info->stream;
13449 notify_msg.message.error.frame_number = req->frame_number;
13450 pStream_Buf[index].acquire_fence = -1;
13451 pStream_Buf[index].release_fence = -1;
13452 pStream_Buf[index].buffer = info->buffer;
13453 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13454 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013455 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013456 index++;
13457 // Remove buffer from list
13458 info = req->mPendingBufferList.erase(info);
13459 }
13460
13461 // Remove this request from Map
13462            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13463                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13464 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13465
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013466 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013467
13468 delete [] pStream_Buf;
13469 } else {
13470
13471            // Go through the pending request info and send an ERROR_REQUEST to the framework
13472 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13473
13474 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13475
13476            // Send an error notification to the framework
13477 camera3_notify_msg_t notify_msg;
13478 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13479 notify_msg.type = CAMERA3_MSG_ERROR;
13480 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13481 notify_msg.message.error.error_stream = NULL;
13482 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013483 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013484
13485 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13486 if (NULL == pStream_Buf) {
13487 LOGE("No memory for pending buffers array");
13488 return NO_MEMORY;
13489 }
13490 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13491
13492 result.result = NULL;
13493 result.frame_number = req->frame_number;
13494 result.input_buffer = i->input_buffer;
13495 result.num_output_buffers = req->mPendingBufferList.size();
13496 result.output_buffers = pStream_Buf;
13497
13498 size_t index = 0;
13499 for (auto info = req->mPendingBufferList.begin();
13500 info != req->mPendingBufferList.end(); ) {
13501 pStream_Buf[index].acquire_fence = -1;
13502 pStream_Buf[index].release_fence = -1;
13503 pStream_Buf[index].buffer = info->buffer;
13504 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13505 pStream_Buf[index].stream = info->stream;
13506 index++;
13507 // Remove buffer from list
13508 info = req->mPendingBufferList.erase(info);
13509 }
13510
13511 // Remove this request from Map
13512            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13513                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13514 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13515
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013516 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013517 delete [] pStream_Buf;
13518 i = erasePendingRequest(i);
13519 }
13520 }
13521
13522 /* Reset pending frame Drop list and requests list */
13523 mPendingFrameDropList.clear();
13524
13525 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13526 req.mPendingBufferList.clear();
13527 }
13528 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013529 LOGH("Cleared all the pending buffers ");
13530
13531 return rc;
13532}
13533
13534bool QCamera3HardwareInterface::isOnEncoder(
13535 const cam_dimension_t max_viewfinder_size,
13536 uint32_t width, uint32_t height)
13537{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013538 return ((width > (uint32_t)max_viewfinder_size.width) ||
13539 (height > (uint32_t)max_viewfinder_size.height) ||
13540 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13541 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013542}
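
/*
 * Usage sketch (illustrative, hypothetical caller and values): decide whether a
 * requested stream must take the encoder path. Assuming a maximum viewfinder
 * size of 1920x1080:
 *
 *   cam_dimension_t maxVf = {1920, 1080};
 *   bool fullSize = isOnEncoder(maxVf, 4032, 3024);   // true: exceeds the viewfinder size
 *   bool preview  = isOnEncoder(maxVf, 1280, 720);    // false: fits the non-encoder path
 */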
13543
13544/*===========================================================================
13545 * FUNCTION : setBundleInfo
13546 *
13547 * DESCRIPTION: Set bundle info for all streams that are bundled.
13548 *
13549 * PARAMETERS : None
13550 *
13551 * RETURN : NO_ERROR on success
13552 * Error codes on failure
13553 *==========================================================================*/
13554int32_t QCamera3HardwareInterface::setBundleInfo()
13555{
13556 int32_t rc = NO_ERROR;
13557
13558 if (mChannelHandle) {
13559 cam_bundle_config_t bundleInfo;
13560 memset(&bundleInfo, 0, sizeof(bundleInfo));
13561 rc = mCameraHandle->ops->get_bundle_info(
13562 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13563 if (rc != NO_ERROR) {
13564 LOGE("get_bundle_info failed");
13565 return rc;
13566 }
13567 if (mAnalysisChannel) {
13568 mAnalysisChannel->setBundleInfo(bundleInfo);
13569 }
13570 if (mSupportChannel) {
13571 mSupportChannel->setBundleInfo(bundleInfo);
13572 }
13573 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13574 it != mStreamInfo.end(); it++) {
13575 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13576 channel->setBundleInfo(bundleInfo);
13577 }
13578 if (mRawDumpChannel) {
13579 mRawDumpChannel->setBundleInfo(bundleInfo);
13580 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013581 if (mHdrPlusRawSrcChannel) {
13582 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13583 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013584 }
13585
13586 return rc;
13587}
13588
13589/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013590 * FUNCTION : setInstantAEC
13591 *
13592 * DESCRIPTION: Set Instant AEC related params.
13593 *
13594 * PARAMETERS :
13595 * @meta: CameraMetadata reference
13596 *
13597 * RETURN : NO_ERROR on success
13598 * Error codes on failure
13599 *==========================================================================*/
13600int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13601{
13602 int32_t rc = NO_ERROR;
13603 uint8_t val = 0;
13604 char prop[PROPERTY_VALUE_MAX];
13605
13606 // First try to configure instant AEC from framework metadata
13607 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13608 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13609 }
13610
13611    // If the framework did not set this value, try to read it from the system property.
13612 if (val == 0) {
13613 memset(prop, 0, sizeof(prop));
13614 property_get("persist.camera.instant.aec", prop, "0");
13615 val = (uint8_t)atoi(prop);
13616 }
13617
13618 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13619 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13620 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13621 mInstantAEC = val;
13622 mInstantAECSettledFrameNumber = 0;
13623 mInstantAecFrameIdxCount = 0;
13624 LOGH("instantAEC value set %d",val);
13625 if (mInstantAEC) {
13626 memset(prop, 0, sizeof(prop));
13627 property_get("persist.camera.ae.instant.bound", prop, "10");
13628 int32_t aec_frame_skip_cnt = atoi(prop);
13629 if (aec_frame_skip_cnt >= 0) {
13630 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13631 } else {
13632 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13633 rc = BAD_VALUE;
13634 }
13635 }
13636 } else {
13637 LOGE("Bad instant aec value set %d", val);
13638 rc = BAD_VALUE;
13639 }
13640 return rc;
13641}
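
/*
 * Illustrative note (assumed workflow, not a recommendation): the instant AEC
 * mode can come from the QCAMERA3_INSTANT_AEC_MODE vendor tag or, when that tag
 * is absent or zero, from the properties read above. For a local experiment one
 * might use:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10
 *
 * assuming the value 1 maps to a valid cam_aec_convergence_type, i.e. it lies in
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX).
 */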
13642
13643/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013644 * FUNCTION : get_num_overall_buffers
13645 *
13646 * DESCRIPTION: Return the total number of pending buffers across all requests.
13647 *
13648 * PARAMETERS : None
13649 *
13650 * RETURN : Number of overall pending buffers
13651 *
13652 *==========================================================================*/
13653uint32_t PendingBuffersMap::get_num_overall_buffers()
13654{
13655 uint32_t sum_buffers = 0;
13656 for (auto &req : mPendingBuffersInRequest) {
13657 sum_buffers += req.mPendingBufferList.size();
13658 }
13659 return sum_buffers;
13660}
13661
13662/*===========================================================================
13663 * FUNCTION : removeBuf
13664 *
13665 * DESCRIPTION: Remove a matching buffer from the tracker.
13666 *
13667 * PARAMETERS : @buffer: image buffer for the callback
13668 *
13669 * RETURN : None
13670 *
13671 *==========================================================================*/
13672void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13673{
13674 bool buffer_found = false;
13675 for (auto req = mPendingBuffersInRequest.begin();
13676 req != mPendingBuffersInRequest.end(); req++) {
13677 for (auto k = req->mPendingBufferList.begin();
13678 k != req->mPendingBufferList.end(); k++ ) {
13679 if (k->buffer == buffer) {
13680                LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
13681 req->frame_number, buffer);
13682 k = req->mPendingBufferList.erase(k);
13683 if (req->mPendingBufferList.empty()) {
13684 // Remove this request from Map
13685 req = mPendingBuffersInRequest.erase(req);
13686 }
13687 buffer_found = true;
13688 break;
13689 }
13690 }
13691 if (buffer_found) {
13692 break;
13693 }
13694 }
13695 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13696 get_num_overall_buffers());
13697}
13698
13699/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013700 * FUNCTION : getBufErrStatus
13701 *
13702 * DESCRIPTION: Get the error status of a buffer
13703 *
13704 * PARAMETERS : @buffer: buffer handle
13705 *
13706 * RETURN : Error status
13707 *
13708 *==========================================================================*/
13709int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13710{
13711 for (auto& req : mPendingBuffersInRequest) {
13712 for (auto& k : req.mPendingBufferList) {
13713 if (k.buffer == buffer)
13714 return k.bufStatus;
13715 }
13716 }
13717 return CAMERA3_BUFFER_STATUS_OK;
13718}
13719
13720/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013721 * FUNCTION : setPAAFSupport
13722 *
13723 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13724 * feature mask according to stream type and filter
13725 * arrangement
13726 *
13727 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13728 * @stream_type: stream type
13729 * @filter_arrangement: filter arrangement
13730 *
13731 * RETURN : None
13732 *==========================================================================*/
13733void QCamera3HardwareInterface::setPAAFSupport(
13734 cam_feature_mask_t& feature_mask,
13735 cam_stream_type_t stream_type,
13736 cam_color_filter_arrangement_t filter_arrangement)
13737{
Thierry Strudel3d639192016-09-09 11:52:26 -070013738 switch (filter_arrangement) {
13739 case CAM_FILTER_ARRANGEMENT_RGGB:
13740 case CAM_FILTER_ARRANGEMENT_GRBG:
13741 case CAM_FILTER_ARRANGEMENT_GBRG:
13742 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013743 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13744 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013745 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013746 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13747 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070013748 }
13749 break;
13750 case CAM_FILTER_ARRANGEMENT_Y:
13751 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13752 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13753 }
13754 break;
13755 default:
13756 break;
13757 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070013758 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13759 feature_mask, stream_type, filter_arrangement);
13760
13761
Thierry Strudel3d639192016-09-09 11:52:26 -070013762}
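
/*
 * Usage sketch (illustrative, hypothetical caller): enable PAAF in the feature
 * mask of a preview stream on a Bayer (RGGB) sensor.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now includes CAM_QCOM_FEATURE_PAAF, unless CAM_QTI_FEATURE_PPEISCORE
 *   // was already set in it.
 */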
13763
13764/*===========================================================================
13765* FUNCTION : getSensorMountAngle
13766*
13767* DESCRIPTION: Retrieve sensor mount angle
13768*
13769* PARAMETERS : None
13770*
13771* RETURN : sensor mount angle in uint32_t
13772*==========================================================================*/
13773uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13774{
13775 return gCamCapability[mCameraId]->sensor_mount_angle;
13776}
13777
13778/*===========================================================================
13779* FUNCTION : getRelatedCalibrationData
13780*
13781* DESCRIPTION: Retrieve related system calibration data
13782*
13783* PARAMETERS : None
13784*
13785* RETURN : Pointer of related system calibration data
13786*==========================================================================*/
13787const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13788{
13789 return (const cam_related_system_calibration_data_t *)
13790 &(gCamCapability[mCameraId]->related_cam_calibration);
13791}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013792
13793/*===========================================================================
13794 * FUNCTION : is60HzZone
13795 *
13796 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
13797 *
13798 * PARAMETERS : None
13799 *
13800 * RETURN : True if in 60Hz zone, False otherwise
13801 *==========================================================================*/
13802bool QCamera3HardwareInterface::is60HzZone()
13803{
13804 time_t t = time(NULL);
13805 struct tm lt;
13806
13807 struct tm* r = localtime_r(&t, &lt);
13808
13809 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13810 return true;
13811 else
13812 return false;
13813}
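
/*
 * Worked example of the offset heuristic above (illustrative): a UTC offset of
 * -8h (e.g. US Pacific) or +9h (e.g. Korea) falls outside the (-2h, +8h) band,
 * so the function returns true (60Hz mains); an offset of +1h (e.g. Central
 * Europe) falls inside the band and returns false (50Hz mains). The boundaries
 * are coarse approximations based on time zone alone.
 */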
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013814
13815/*===========================================================================
13816 * FUNCTION : adjustBlackLevelForCFA
13817 *
13818 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order
13819 *              of the sensor's Bayer CFA (Color Filter Array).
13820 *
13821 * PARAMETERS : @input: black level pattern in the order of RGGB
13822 * @output: black level pattern in the order of CFA
13823 * @color_arrangement: CFA color arrangement
13824 *
13825 * RETURN : None
13826 *==========================================================================*/
13827template<typename T>
13828void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13829 T input[BLACK_LEVEL_PATTERN_CNT],
13830 T output[BLACK_LEVEL_PATTERN_CNT],
13831 cam_color_filter_arrangement_t color_arrangement)
13832{
13833 switch (color_arrangement) {
13834 case CAM_FILTER_ARRANGEMENT_GRBG:
13835 output[0] = input[1];
13836 output[1] = input[0];
13837 output[2] = input[3];
13838 output[3] = input[2];
13839 break;
13840 case CAM_FILTER_ARRANGEMENT_GBRG:
13841 output[0] = input[2];
13842 output[1] = input[3];
13843 output[2] = input[0];
13844 output[3] = input[1];
13845 break;
13846 case CAM_FILTER_ARRANGEMENT_BGGR:
13847 output[0] = input[3];
13848 output[1] = input[2];
13849 output[2] = input[1];
13850 output[3] = input[0];
13851 break;
13852 case CAM_FILTER_ARRANGEMENT_RGGB:
13853 output[0] = input[0];
13854 output[1] = input[1];
13855 output[2] = input[2];
13856 output[3] = input[3];
13857 break;
13858 default:
13859 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13860 break;
13861 }
13862}
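
/*
 * Usage sketch (illustrative, hypothetical values): remap a black level pattern
 * given in R, Gr, Gb, B (RGGB) order for a sensor with a GRBG filter arrangement.
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f};
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // cfa is now {64.5f, 64.0f, 65.0f, 64.5f}, i.e. Gr, R, B, Gb.
 */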
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013863
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013864void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13865 CameraMetadata &resultMetadata,
13866 std::shared_ptr<metadata_buffer_t> settings)
13867{
13868 if (settings == nullptr) {
13869 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13870 return;
13871 }
13872
13873 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13874 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13875 }
13876
13877 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13878 String8 str((const char *)gps_methods);
13879 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13880 }
13881
13882 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13883 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13884 }
13885
13886 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13887 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13888 }
13889
13890 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13891 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13892 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13893 }
13894
13895 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13896 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13897 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13898 }
13899
13900 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13901 int32_t fwk_thumb_size[2];
13902 fwk_thumb_size[0] = thumb_size->width;
13903 fwk_thumb_size[1] = thumb_size->height;
13904 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13905 }
13906
13907 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13908 uint8_t fwk_intent = intent[0];
13909 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13910 }
13911}
13912
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013913bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13914 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13915{
13916 if (hdrPlusRequest == nullptr) return false;
13917
13918 // Check noise reduction mode is high quality.
13919 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13920 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13921 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013922 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13923 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013924 return false;
13925 }
13926
13927 // Check edge mode is high quality.
13928 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13929 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13930 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13931 return false;
13932 }
13933
13934 if (request.num_output_buffers != 1 ||
13935 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13936 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013937 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13938 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13939                    request.output_buffers[i].stream->width,
13940                    request.output_buffers[i].stream->height,
13941                    request.output_buffers[i].stream->format);
13942 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013943 return false;
13944 }
13945
13946 // Get a YUV buffer from pic channel.
13947 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13948 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13949 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13950 if (res != OK) {
13951 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13952 __FUNCTION__, strerror(-res), res);
13953 return false;
13954 }
13955
13956 pbcamera::StreamBuffer buffer;
13957 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080013958 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013959 buffer.data = yuvBuffer->buffer;
13960 buffer.dataSize = yuvBuffer->frame_len;
13961
13962 pbcamera::CaptureRequest pbRequest;
13963 pbRequest.id = request.frame_number;
13964 pbRequest.outputBuffers.push_back(buffer);
13965
13966 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013967 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013968 if (res != OK) {
13969 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13970 strerror(-res), res);
13971 return false;
13972 }
13973
13974 hdrPlusRequest->yuvBuffer = yuvBuffer;
13975 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13976
13977 return true;
13978}
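
/*
 * Sketch of a request that passes the checks above (illustrative, hypothetical
 * settings object): high-quality noise reduction and edge modes plus exactly one
 * BLOB (JPEG) output buffer.
 *
 *   CameraMetadata settings;
 *   uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *   // request.num_output_buffers == 1 and
 *   // request.output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB
 */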
13979
Chien-Yu Chenee335912017-02-09 17:53:20 -080013980status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13981{
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013982 if (gHdrPlusClient == nullptr) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013983 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13984 return -ENODEV;
13985 }
13986
13987 // Connect to HDR+ service
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013988 status_t res = gHdrPlusClient->connect(this);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013989 if (res != OK) {
13990 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
13991 strerror(-res), res);
13992 return res;
13993 }
13994
13995 // Set static metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080013996 res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013997 if (res != OK) {
13998        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
13999 strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014000 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080014001 return res;
14002 }
14003
14004 // Configure stream for HDR+.
14005 res = configureHdrPlusStreamsLocked();
14006 if (res != OK) {
14007 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014008 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080014009 return res;
14010 }
14011
14012 mHdrPlusModeEnabled = true;
14013 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14014
14015 return OK;
14016}
14017
14018void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14019{
14020 // Disconnect from HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014021 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14022 gHdrPlusClient->disconnect();
Chien-Yu Chenee335912017-02-09 17:53:20 -080014023 }
14024
14025 mHdrPlusModeEnabled = false;
14026 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14027}
14028
14029status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014030{
14031 pbcamera::InputConfiguration inputConfig;
14032 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14033 status_t res = OK;
14034
14035 // Configure HDR+ client streams.
14036 // Get input config.
14037 if (mHdrPlusRawSrcChannel) {
14038 // HDR+ input buffers will be provided by HAL.
14039 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14040 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14041 if (res != OK) {
14042            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14043 __FUNCTION__, strerror(-res), res);
14044 return res;
14045 }
14046
14047 inputConfig.isSensorInput = false;
14048 } else {
14049 // Sensor MIPI will send data to Easel.
14050 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014051 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014052 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14053 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14054 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14055 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14056 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14057 if (mSensorModeInfo.num_raw_bits != 10) {
14058 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14059 mSensorModeInfo.num_raw_bits);
14060 return BAD_VALUE;
14061 }
14062
14063 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014064 }
14065
14066 // Get output configurations.
14067 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014068 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014069
14070 // Easel may need to output YUV output buffers if mPictureChannel was created.
14071 pbcamera::StreamConfiguration yuvOutputConfig;
14072 if (mPictureChannel != nullptr) {
14073 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14074 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14075 if (res != OK) {
14076            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14077 __FUNCTION__, strerror(-res), res);
14078
14079 return res;
14080 }
14081
14082 outputStreamConfigs.push_back(yuvOutputConfig);
14083 }
14084
14085 // TODO: consider other channels for YUV output buffers.
14086
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014087 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014088 if (res != OK) {
14089        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14090 strerror(-res), res);
14091 return res;
14092 }
14093
14094 return OK;
14095}
14096
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014097void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14098 const camera_metadata_t &resultMetadata) {
14099 if (result != nullptr) {
14100 if (result->outputBuffers.size() != 1) {
14101            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14102 result->outputBuffers.size());
14103 return;
14104 }
14105
14106 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14107 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14108 result->outputBuffers[0].streamId);
14109 return;
14110 }
14111
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014112 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014113 HdrPlusPendingRequest pendingRequest;
14114 {
14115 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14116 auto req = mHdrPlusPendingRequests.find(result->requestId);
14117 pendingRequest = req->second;
14118 }
14119
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014120 // Update the result metadata with the settings of the HDR+ still capture request because
14121 // the result metadata belongs to a ZSL buffer.
14122 CameraMetadata metadata;
14123 metadata = &resultMetadata;
14124 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14125 camera_metadata_t* updatedResultMetadata = metadata.release();
14126
14127 QCamera3PicChannel *picChannel =
14128 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14129
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014130 // Check if dumping HDR+ YUV output is enabled.
14131 char prop[PROPERTY_VALUE_MAX];
14132 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14133 bool dumpYuvOutput = atoi(prop);
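        // For local debugging the dump can be toggled at runtime with, e.g.,
        // "adb shell setprop persist.camera.hdrplus.dump_yuv 1".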
14134
14135 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014136 // Dump yuv buffer to a ppm file.
14137 pbcamera::StreamConfiguration outputConfig;
14138 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14139 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14140 if (rc == OK) {
14141 char buf[FILENAME_MAX] = {};
14142 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14143 result->requestId, result->outputBuffers[0].streamId,
14144 outputConfig.image.width, outputConfig.image.height);
14145
14146 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14147 } else {
14148 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14149 __FUNCTION__, strerror(-rc), rc);
14150 }
14151 }
14152
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014153 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14154 auto halMetadata = std::make_shared<metadata_buffer_t>();
14155 clear_metadata_buffer(halMetadata.get());
14156
14157 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14158 // encoding.
14159 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14160 halStreamId, /*minFrameDuration*/0);
14161 if (res == OK) {
14162 // Return the buffer to pic channel for encoding.
14163 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14164 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14165 halMetadata);
14166 } else {
14167 // Return the buffer without encoding.
14168 // TODO: This should not happen but we may want to report an error buffer to camera
14169 // service.
14170 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14171 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14172 strerror(-res), res);
14173 }
14174
14175 // Send HDR+ metadata to framework.
14176 {
14177 pthread_mutex_lock(&mMutex);
14178
14179 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14180 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14181 pthread_mutex_unlock(&mMutex);
14182 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014183
14184 // Remove the HDR+ pending request.
14185 {
14186 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14187 auto req = mHdrPlusPendingRequests.find(result->requestId);
14188 mHdrPlusPendingRequests.erase(req);
14189 }
14190 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014191}
14192
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014193void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14194 // TODO: Handle HDR+ capture failures and send the failure to framework.
14195 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14196 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14197
14198 // Return the buffer to pic channel.
14199 QCamera3PicChannel *picChannel =
14200 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14201 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14202
14203 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014204}
14205
Thierry Strudel3d639192016-09-09 11:52:26 -070014206}; //end namespace qcamera