/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
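// Illustrative note (added; not in the original source): with the values above, a
// constrained high-speed configuration at MIN_FPS_FOR_BATCH_MODE (120 fps) paired with a
// PREVIEW_FPS_FOR_HFR (30 fps) preview would typically be serviced in batches of
// 120 / 30 = 4 frames per request, and 240 fps in batches of 8, which is why
// MAX_HFR_BATCH_SIZE is capped at 8. The actual batch size is computed during stream
// configuration, outside this excerpt.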
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
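// Illustrative usage (added; not in the original source): METADATA_MAP_SIZE yields the
// number of entries in one of the constant lookup tables defined below, e.g.
//     size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP); // 9 entries
// which is the bound a table-driven enum translation would iterate up to.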

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// HDR+ client instance. If null, Easel was not detected on this device.
// Note that this doesn't support concurrent front and back camera b/35960155.
std::shared_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
// Whether Easel is in bypass-only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
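// Illustrative sketch (added; not in the original source): CDS_MAP above is the kind of
// string-to-enum table a property lookup would scan linearly, e.g.
//     int cds = CAM_CDS_MODE_AUTO;                     // assumed default for this sketch
//     for (size_t i = 0; i < METADATA_MAP_SIZE(CDS_MAP); i++) {
//         if (!strcmp(prop, CDS_MAP[i].desc)) { cds = CDS_MAP[i].val; break; }
//     }
// The member names 'desc'/'val' are assumptions here; QCameraPropMap is declared in the
// class header, not in this excerpt.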
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
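// Note (added for clarity; not in the original source): the entries above are flattened
// (width, height) pairs, and the leading (0, 0) pair advertises "no thumbnail" as a valid
// choice, as expected for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.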

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android values the
 * table is traversed from lower to higher index, so for HAL values that map to multiple
 * Android values, the first match found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
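// Illustrative sketch (added; not in the original source) of the first-match traversal the
// comment above relies on: CAM_AWB_D50 appears for D50, DAYLIGHT and FINE_WEATHER, so a
// HAL-to-Android lookup of the form
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_name == halValue) {
//             fwkValue = REFERENCE_ILLUMINANT_MAP[i].fwk_name;
//             break;
//         }
//     }
// reports CAM_AWB_D50 as ILLUMINANT1_D50, the first (and intended) entry. The member names
// fwk_name/hal_name are assumptions about how QCameraMap is declared in the class header.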

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
        case CAM_EVENT_TYPE_DAEMON_DIED:
            pthread_mutex_lock(&obj->mMutex);
            obj->mState = ERROR;
            pthread_mutex_unlock(&obj->mMutex);
            LOGE("Fatal, camera daemon died");
            break;

        case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
            LOGD("HAL got request pull from Daemon");
            pthread_mutex_lock(&obj->mMutex);
            obj->mWokenUpByDaemon = true;
            obj->unblockRequestIfNecessary();
            pthread_mutex_unlock(&obj->mMutex);
            break;

        default:
            LOGW("Warning: Unhandled event %d",
                    evt->server_event_type);
            break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    mIsApInputUsedForHdrPlus =
            property_get_bool("persist.camera.hdrplus.apinput", false);
    ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
            mIsApInputUsedForHdrPlus ? "AP" : "Easel");

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->resumeEasel();
        if (rc != 0) {
            ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            return rc;
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap, 0);
    }

    LOGH("mCameraId=%d", mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (gHdrPlusClient != nullptr) {
        rc = gHdrPlusClient->suspendEasel();
        if (rc != 0) {
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format, check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec, the depth point cloud size should be sample count / 16
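                // Worked example (added for clarity; not in the original source): for a
                // hypothetical 4032x3024 active array, depthSamplesCount below is
                // 4032 * 3024 / 16 = 762048, so the only accepted depth BLOB size would
                // be 762048x1.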
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams*/
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensor_mode_info : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
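    /* Added note (not in the original source): the property value is accepted either as
     * hex with a leading "0x" (e.g. "0x10") or as a plain decimal string (e.g. "16");
     * the parsing below selects the format based on the "0x" prefix. */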
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1406
1407/*==============================================================================
1408 * FUNCTION : updateFpsInPreviewBuffer
1409 *
1410 * DESCRIPTION: update FPS information in preview buffer.
1411 *
1412 * PARAMETERS :
1413 * @metadata : pointer to metadata buffer
1414 * @frame_number: frame_number to look for in pending buffer list
1415 *
1416 * RETURN : None
1417 *
1418 *==========================================================================*/
1419void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1420 uint32_t frame_number)
1421{
1422 // Mark all pending buffers for this particular request
1423 // with corresponding framerate information
1424 for (List<PendingBuffersInRequest>::iterator req =
1425 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1426 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1427 for(List<PendingBufferInfo>::iterator j =
1428 req->mPendingBufferList.begin();
1429 j != req->mPendingBufferList.end(); j++) {
1430 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1431 if ((req->frame_number == frame_number) &&
1432 (channel->getStreamTypeMask() &
1433 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1434 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1435 CAM_INTF_PARM_FPS_RANGE, metadata) {
1436 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1437 struct private_handle_t *priv_handle =
1438 (struct private_handle_t *)(*(j->buffer));
1439 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1440 }
1441 }
1442 }
1443 }
1444}
1445
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001446/*==============================================================================
1447 * FUNCTION : updateTimeStampInPendingBuffers
1448 *
1449 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1450 * of a frame number
1451 *
1452 * PARAMETERS :
1453 * @frameNumber : frame number whose pending buffers will receive the timestamp
1454 * @timestamp : timestamp to be set
1455 *
1456 * RETURN : None
1457 *
1458 *==========================================================================*/
1459void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1460 uint32_t frameNumber, nsecs_t timestamp)
1461{
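// SET_VT_TIMESTAMP writes the timestamp into the buffer's display metadata
// (qdMetaData) so downstream consumers can pick it up; presumably this is
// used for A/V sync in video-telephony style use cases.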
1462 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1463 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1464 if (req->frame_number != frameNumber)
1465 continue;
1466
1467 for (auto k = req->mPendingBufferList.begin();
1468 k != req->mPendingBufferList.end(); k++ ) {
1469 struct private_handle_t *priv_handle =
1470 (struct private_handle_t *) (*(k->buffer));
1471 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1472 }
1473 }
1474 return;
1475}
1476
Thierry Strudel3d639192016-09-09 11:52:26 -07001477/*===========================================================================
1478 * FUNCTION : configureStreams
1479 *
1480 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1481 * and output streams.
1482 *
1483 * PARAMETERS :
1484 * @stream_list : streams to be configured
1485 *
1486 * RETURN : int32_t type of status (NO_ERROR -- success, non-zero -- failure)
1487 *
1488 *==========================================================================*/
1489int QCamera3HardwareInterface::configureStreams(
1490 camera3_stream_configuration_t *streamList)
1491{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001492 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 int rc = 0;
1494
1495 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001496 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001498 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001499
1500 return rc;
1501}
1502
1503/*===========================================================================
1504 * FUNCTION : configureStreamsPerfLocked
1505 *
1506 * DESCRIPTION: configureStreams while perfLock is held.
1507 *
1508 * PARAMETERS :
1509 * @stream_list : streams to be configured
1510 *
1511 * RETURN : int32_t type of status
1512 * NO_ERROR -- success
1513 * non-zero failure code
1514 *==========================================================================*/
1515int QCamera3HardwareInterface::configureStreamsPerfLocked(
1516 camera3_stream_configuration_t *streamList)
1517{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001518 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001519 int rc = 0;
1520
1521 // Sanity check stream_list
1522 if (streamList == NULL) {
1523 LOGE("NULL stream configuration");
1524 return BAD_VALUE;
1525 }
1526 if (streamList->streams == NULL) {
1527 LOGE("NULL stream list");
1528 return BAD_VALUE;
1529 }
1530
1531 if (streamList->num_streams < 1) {
1532 LOGE("Bad number of streams requested: %d",
1533 streamList->num_streams);
1534 return BAD_VALUE;
1535 }
1536
1537 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1538 LOGE("Maximum number of streams %d exceeded: %d",
1539 MAX_NUM_STREAMS, streamList->num_streams);
1540 return BAD_VALUE;
1541 }
1542
1543 mOpMode = streamList->operation_mode;
1544 LOGD("mOpMode: %d", mOpMode);
1545
1546 /* first invalidate all the streams in the mStreamInfo list;
1547 * if they appear again, they will be validated */
1548 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1549 it != mStreamInfo.end(); it++) {
1550 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1551 if (channel) {
1552 channel->stop();
1553 }
1554 (*it)->status = INVALID;
1555 }
1556
1557 if (mRawDumpChannel) {
1558 mRawDumpChannel->stop();
1559 delete mRawDumpChannel;
1560 mRawDumpChannel = NULL;
1561 }
1562
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001563 if (mHdrPlusRawSrcChannel) {
1564 mHdrPlusRawSrcChannel->stop();
1565 delete mHdrPlusRawSrcChannel;
1566 mHdrPlusRawSrcChannel = NULL;
1567 }
1568
Thierry Strudel3d639192016-09-09 11:52:26 -07001569 if (mSupportChannel)
1570 mSupportChannel->stop();
1571
1572 if (mAnalysisChannel) {
1573 mAnalysisChannel->stop();
1574 }
1575 if (mMetadataChannel) {
1576 /* If mStreamInfo is not empty, the metadata stream exists */
1577 mMetadataChannel->stop();
1578 }
1579 if (mChannelHandle) {
1580 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1581 mChannelHandle);
1582 LOGD("stopping channel %d", mChannelHandle);
1583 }
1584
1585 pthread_mutex_lock(&mMutex);
1586
1587 // Check state
1588 switch (mState) {
1589 case INITIALIZED:
1590 case CONFIGURED:
1591 case STARTED:
1592 /* valid state */
1593 break;
1594 default:
1595 LOGE("Invalid state %d", mState);
1596 pthread_mutex_unlock(&mMutex);
1597 return -ENODEV;
1598 }
1599
1600 /* Check whether we have video stream */
1601 m_bIs4KVideo = false;
1602 m_bIsVideo = false;
1603 m_bEisSupportedSize = false;
1604 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001605 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001606 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001607 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 uint32_t videoWidth = 0U;
1609 uint32_t videoHeight = 0U;
1610 size_t rawStreamCnt = 0;
1611 size_t stallStreamCnt = 0;
1612 size_t processedStreamCnt = 0;
1613 // Number of streams on ISP encoder path
1614 size_t numStreamsOnEncoder = 0;
1615 size_t numYuv888OnEncoder = 0;
1616 bool bYuv888OverrideJpeg = false;
1617 cam_dimension_t largeYuv888Size = {0, 0};
1618 cam_dimension_t maxViewfinderSize = {0, 0};
1619 bool bJpegExceeds4K = false;
1620 bool bJpegOnEncoder = false;
1621 bool bUseCommonFeatureMask = false;
1622 cam_feature_mask_t commonFeatureMask = 0;
1623 bool bSmallJpegSize = false;
1624 uint32_t width_ratio;
1625 uint32_t height_ratio;
1626 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1627 camera3_stream_t *inputStream = NULL;
1628 bool isJpeg = false;
1629 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001630 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001631
1632 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1633
1634 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001635 uint8_t eis_prop_set;
1636 uint32_t maxEisWidth = 0;
1637 uint32_t maxEisHeight = 0;
1638
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001639 // Initialize all instant AEC related variables
1640 mInstantAEC = false;
1641 mResetInstantAEC = false;
1642 mInstantAECSettledFrameNumber = 0;
1643 mAecSkipDisplayFrameBound = 0;
1644 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001645 mCurrFeatureState = 0;
1646 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001647
Thierry Strudel3d639192016-09-09 11:52:26 -07001648 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1649
1650 size_t count = IS_TYPE_MAX;
1651 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1652 for (size_t i = 0; i < count; i++) {
1653 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001654 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1655 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 break;
1657 }
1658 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001659
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001660 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001661 maxEisWidth = MAX_EIS_WIDTH;
1662 maxEisHeight = MAX_EIS_HEIGHT;
1663 }
1664
1665 /* EIS setprop control */
1666 char eis_prop[PROPERTY_VALUE_MAX];
1667 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001668 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001669 eis_prop_set = (uint8_t)atoi(eis_prop);
1670
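// EIS is enabled only when the property allows it, the sensor advertises EIS
// 2.0/3.0 support, and this is not a constrained high-speed session; it is
// further disabled below for front/front-aux cameras and non-video configs.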
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001671 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001672 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1673
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001674 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1675 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001676
Thierry Strudel3d639192016-09-09 11:52:26 -07001677 /* stream configurations */
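// First pass over the stream list: collect stream counts and the video/ZSL/
// JPEG flags plus encoder-path usage. Channels and per-stream feature masks
// are set up in the second pass further below.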
1678 for (size_t i = 0; i < streamList->num_streams; i++) {
1679 camera3_stream_t *newStream = streamList->streams[i];
1680 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1681 "height = %d, rotation = %d, usage = 0x%x",
1682 i, newStream->stream_type, newStream->format,
1683 newStream->width, newStream->height, newStream->rotation,
1684 newStream->usage);
1685 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1686 newStream->stream_type == CAMERA3_STREAM_INPUT){
1687 isZsl = true;
1688 }
1689 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1690 inputStream = newStream;
1691 }
1692
Emilian Peev7650c122017-01-19 08:24:33 -08001693 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1694 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001695 isJpeg = true;
1696 jpegSize.width = newStream->width;
1697 jpegSize.height = newStream->height;
1698 if (newStream->width > VIDEO_4K_WIDTH ||
1699 newStream->height > VIDEO_4K_HEIGHT)
1700 bJpegExceeds4K = true;
1701 }
1702
1703 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1704 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1705 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001706 // In HAL3 we can have multiple different video streams.
1707 // The variables video width and height are used below as
1708 // dimensions of the biggest of them
1709 if (videoWidth < newStream->width ||
1710 videoHeight < newStream->height) {
1711 videoWidth = newStream->width;
1712 videoHeight = newStream->height;
1713 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001714 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1715 (VIDEO_4K_HEIGHT <= newStream->height)) {
1716 m_bIs4KVideo = true;
1717 }
1718 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1719 (newStream->height <= maxEisHeight);
1720 }
1721 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1722 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1723 switch (newStream->format) {
1724 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001725 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1726 depthPresent = true;
1727 break;
1728 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 stallStreamCnt++;
1730 if (isOnEncoder(maxViewfinderSize, newStream->width,
1731 newStream->height)) {
1732 numStreamsOnEncoder++;
1733 bJpegOnEncoder = true;
1734 }
1735 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1736 newStream->width);
1737 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1738 newStream->height);
1739 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1740 "FATAL: max_downscale_factor cannot be zero and so assert");
1741 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1742 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1743 LOGH("Setting small jpeg size flag to true");
1744 bSmallJpegSize = true;
1745 }
1746 break;
1747 case HAL_PIXEL_FORMAT_RAW10:
1748 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1749 case HAL_PIXEL_FORMAT_RAW16:
1750 rawStreamCnt++;
1751 break;
1752 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1753 processedStreamCnt++;
1754 if (isOnEncoder(maxViewfinderSize, newStream->width,
1755 newStream->height)) {
1756 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1757 !IS_USAGE_ZSL(newStream->usage)) {
1758 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1759 }
1760 numStreamsOnEncoder++;
1761 }
1762 break;
1763 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1764 processedStreamCnt++;
1765 if (isOnEncoder(maxViewfinderSize, newStream->width,
1766 newStream->height)) {
1767 // If Yuv888 size is not greater than 4K, set feature mask
1768 // to SUPERSET so that it support concurrent request on
1769 // YUV and JPEG.
1770 if (newStream->width <= VIDEO_4K_WIDTH &&
1771 newStream->height <= VIDEO_4K_HEIGHT) {
1772 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1773 }
1774 numStreamsOnEncoder++;
1775 numYuv888OnEncoder++;
1776 largeYuv888Size.width = newStream->width;
1777 largeYuv888Size.height = newStream->height;
1778 }
1779 break;
1780 default:
1781 processedStreamCnt++;
1782 if (isOnEncoder(maxViewfinderSize, newStream->width,
1783 newStream->height)) {
1784 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1785 numStreamsOnEncoder++;
1786 }
1787 break;
1788 }
1789
1790 }
1791 }
1792
1793 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1794 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1795 !m_bIsVideo) {
1796 m_bEisEnable = false;
1797 }
1798
Thierry Strudel54dc9782017-02-15 12:12:10 -08001799 uint8_t forceEnableTnr = 0;
1800 char tnr_prop[PROPERTY_VALUE_MAX];
1801 memset(tnr_prop, 0, sizeof(tnr_prop));
1802 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1803 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1804
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 /* Logic to enable/disable TNR based on specific config size/etc.*/
1806 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1807 ((videoWidth == 1920 && videoHeight == 1080) ||
1808 (videoWidth == 1280 && videoHeight == 720)) &&
1809 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1810 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001811 else if (forceEnableTnr)
1812 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001813
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001814 char videoHdrProp[PROPERTY_VALUE_MAX];
1815 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1816 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1817 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1818
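// m_bVideoHdrEnabled is derived here solely from the persist.camera.hdr.video
// property and applies only to video sessions outside constrained high-speed mode.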
1819 if (hdr_mode_prop == 1 && m_bIsVideo &&
1820 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1821 m_bVideoHdrEnabled = true;
1822 else
1823 m_bVideoHdrEnabled = false;
1824
1825
Thierry Strudel3d639192016-09-09 11:52:26 -07001826 /* Check if num_streams is sane */
1827 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1828 rawStreamCnt > MAX_RAW_STREAMS ||
1829 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1830 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1831 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1832 pthread_mutex_unlock(&mMutex);
1833 return -EINVAL;
1834 }
1835 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001836 if (isZsl && m_bIs4KVideo) {
1837 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001838 pthread_mutex_unlock(&mMutex);
1839 return -EINVAL;
1840 }
1841 /* Check if stream sizes are sane */
1842 if (numStreamsOnEncoder > 2) {
1843 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1844 pthread_mutex_unlock(&mMutex);
1845 return -EINVAL;
1846 } else if (1 < numStreamsOnEncoder){
1847 bUseCommonFeatureMask = true;
1848 LOGH("Multiple streams above max viewfinder size, common mask needed");
1849 }
1850
1851 /* Check if BLOB size is greater than 4k in 4k recording case */
1852 if (m_bIs4KVideo && bJpegExceeds4K) {
1853 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1854 pthread_mutex_unlock(&mMutex);
1855 return -EINVAL;
1856 }
1857
Emilian Peev7650c122017-01-19 08:24:33 -08001858 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1859 depthPresent) {
1860 LOGE("HAL doesn't support depth streams in HFR mode!");
1861 pthread_mutex_unlock(&mMutex);
1862 return -EINVAL;
1863 }
1864
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1866 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1867 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1868 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1869 // configurations:
1870 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1871 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1872 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1873 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1874 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1875 __func__);
1876 pthread_mutex_unlock(&mMutex);
1877 return -EINVAL;
1878 }
1879
1880 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1881 // the YUV stream's size is strictly greater than the JPEG size, set common
1882 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1883 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1884 jpegSize.width, jpegSize.height) &&
1885 largeYuv888Size.width > jpegSize.width &&
1886 largeYuv888Size.height > jpegSize.height) {
1887 bYuv888OverrideJpeg = true;
1888 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1889 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1890 }
1891
1892 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1893 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1894 commonFeatureMask);
1895 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1896 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1897
1898 rc = validateStreamDimensions(streamList);
1899 if (rc == NO_ERROR) {
1900 rc = validateStreamRotations(streamList);
1901 }
1902 if (rc != NO_ERROR) {
1903 LOGE("Invalid stream configuration requested!");
1904 pthread_mutex_unlock(&mMutex);
1905 return rc;
1906 }
1907
1908 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1909 for (size_t i = 0; i < streamList->num_streams; i++) {
1910 camera3_stream_t *newStream = streamList->streams[i];
1911 LOGH("newStream type = %d, stream format = %d "
1912 "stream size : %d x %d, stream rotation = %d",
1913 newStream->stream_type, newStream->format,
1914 newStream->width, newStream->height, newStream->rotation);
1915 //if the stream is in the mStreamList validate it
1916 bool stream_exists = false;
1917 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1918 it != mStreamInfo.end(); it++) {
1919 if ((*it)->stream == newStream) {
1920 QCamera3ProcessingChannel *channel =
1921 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1922 stream_exists = true;
1923 if (channel)
1924 delete channel;
1925 (*it)->status = VALID;
1926 (*it)->stream->priv = NULL;
1927 (*it)->channel = NULL;
1928 }
1929 }
1930 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1931 //new stream
1932 stream_info_t* stream_info;
1933 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1934 if (!stream_info) {
1935 LOGE("Could not allocate stream info");
1936 rc = -ENOMEM;
1937 pthread_mutex_unlock(&mMutex);
1938 return rc;
1939 }
1940 stream_info->stream = newStream;
1941 stream_info->status = VALID;
1942 stream_info->channel = NULL;
1943 mStreamInfo.push_back(stream_info);
1944 }
1945 /* Covers Opaque ZSL and API1 F/W ZSL */
1946 if (IS_USAGE_ZSL(newStream->usage)
1947 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1948 if (zslStream != NULL) {
1949 LOGE("Multiple input/reprocess streams requested!");
1950 pthread_mutex_unlock(&mMutex);
1951 return BAD_VALUE;
1952 }
1953 zslStream = newStream;
1954 }
1955 /* Covers YUV reprocess */
1956 if (inputStream != NULL) {
1957 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1958 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1959 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1960 && inputStream->width == newStream->width
1961 && inputStream->height == newStream->height) {
1962 if (zslStream != NULL) {
1963 /* This scenario indicates that multiple YUV streams with the same size
1964 * as the input stream have been requested. Since the zsl stream handle
1965 * is solely used for overriding the size of streams which share h/w
1966 * streams, we will just make a guess here as to which of the streams
1967 * is the ZSL stream; this will be refactored once we have generic
1968 * logic for streams sharing encoder output.
1969 */
1970 LOGH("Warning, Multiple ip/reprocess streams requested!");
1971 }
1972 zslStream = newStream;
1973 }
1974 }
1975 }
1976
1977 /* If a zsl stream is set, we know that we have configured at least one input or
1978 bidirectional stream */
1979 if (NULL != zslStream) {
1980 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1981 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1982 mInputStreamInfo.format = zslStream->format;
1983 mInputStreamInfo.usage = zslStream->usage;
1984 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1985 mInputStreamInfo.dim.width,
1986 mInputStreamInfo.dim.height,
1987 mInputStreamInfo.format, mInputStreamInfo.usage);
1988 }
1989
1990 cleanAndSortStreamInfo();
1991 if (mMetadataChannel) {
1992 delete mMetadataChannel;
1993 mMetadataChannel = NULL;
1994 }
1995 if (mSupportChannel) {
1996 delete mSupportChannel;
1997 mSupportChannel = NULL;
1998 }
1999
2000 if (mAnalysisChannel) {
2001 delete mAnalysisChannel;
2002 mAnalysisChannel = NULL;
2003 }
2004
2005 if (mDummyBatchChannel) {
2006 delete mDummyBatchChannel;
2007 mDummyBatchChannel = NULL;
2008 }
2009
Emilian Peev7650c122017-01-19 08:24:33 -08002010 if (mDepthChannel) {
2011 mDepthChannel = NULL;
2012 }
2013
Thierry Strudel2896d122017-02-23 19:18:03 -08002014 char is_type_value[PROPERTY_VALUE_MAX];
2015 property_get("persist.camera.is_type", is_type_value, "4");
2016 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2017
Thierry Strudel3d639192016-09-09 11:52:26 -07002018 //Create metadata channel and initialize it
2019 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2020 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2021 gCamCapability[mCameraId]->color_arrangement);
2022 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2023 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002024 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002025 if (mMetadataChannel == NULL) {
2026 LOGE("failed to allocate metadata channel");
2027 rc = -ENOMEM;
2028 pthread_mutex_unlock(&mMutex);
2029 return rc;
2030 }
2031 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2032 if (rc < 0) {
2033 LOGE("metadata channel initialization failed");
2034 delete mMetadataChannel;
2035 mMetadataChannel = NULL;
2036 pthread_mutex_unlock(&mMutex);
2037 return rc;
2038 }
2039
Thierry Strudel2896d122017-02-23 19:18:03 -08002040 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002041 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002042 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002043 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2044 /* Allocate channel objects for the requested streams */
2045 for (size_t i = 0; i < streamList->num_streams; i++) {
2046 camera3_stream_t *newStream = streamList->streams[i];
2047 uint32_t stream_usage = newStream->usage;
2048 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2049 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2050 struct camera_info *p_info = NULL;
2051 pthread_mutex_lock(&gCamLock);
2052 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2053 pthread_mutex_unlock(&gCamLock);
2054 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2055 || IS_USAGE_ZSL(newStream->usage)) &&
2056 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002057 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002058 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
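// Pick the ZSL postprocess mask: if this stream itself sits on the encoder
// path it takes the common mask (or none), otherwise it gets the PP superset
// only when some other stream occupies the encoder path.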
Thierry Strudel2896d122017-02-23 19:18:03 -08002059 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2060 if (bUseCommonFeatureMask)
2061 zsl_ppmask = commonFeatureMask;
2062 else
2063 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002064 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002065 if (numStreamsOnEncoder > 0)
2066 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2067 else
2068 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002069 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002070 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002071 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002072 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002073 LOGH("Input stream configured, reprocess config");
2074 } else {
2075 //for non zsl streams find out the format
2076 switch (newStream->format) {
2077 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2078 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002079 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2081 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2082 /* add additional features to pp feature mask */
2083 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2084 mStreamConfigInfo.num_streams);
2085
2086 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2087 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2088 CAM_STREAM_TYPE_VIDEO;
2089 if (m_bTnrEnabled && m_bTnrVideo) {
2090 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2091 CAM_QCOM_FEATURE_CPP_TNR;
2092 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2093 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2094 ~CAM_QCOM_FEATURE_CDS;
2095 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002096 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2097 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2098 CAM_QTI_FEATURE_PPEISCORE;
2099 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002100 } else {
2101 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2102 CAM_STREAM_TYPE_PREVIEW;
2103 if (m_bTnrEnabled && m_bTnrPreview) {
2104 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2105 CAM_QCOM_FEATURE_CPP_TNR;
2106 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2108 ~CAM_QCOM_FEATURE_CDS;
2109 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002110 if(!m_bSwTnrPreview) {
2111 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2112 ~CAM_QTI_FEATURE_SW_TNR;
2113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002114 padding_info.width_padding = mSurfaceStridePadding;
2115 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002116 previewSize.width = (int32_t)newStream->width;
2117 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002118 }
2119 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2120 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2121 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2122 newStream->height;
2123 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2124 newStream->width;
2125 }
2126 }
2127 break;
2128 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002129 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002130 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2131 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2132 if (bUseCommonFeatureMask)
2133 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2134 commonFeatureMask;
2135 else
2136 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2137 CAM_QCOM_FEATURE_NONE;
2138 } else {
2139 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2140 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2141 }
2142 break;
2143 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002144 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002145 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2146 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2147 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2148 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2149 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002150 /* Remove rotation if it is not supported
2151 for 4K LiveVideo snapshot case (online processing) */
2152 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2153 CAM_QCOM_FEATURE_ROTATION)) {
2154 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2155 &= ~CAM_QCOM_FEATURE_ROTATION;
2156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002157 } else {
2158 if (bUseCommonFeatureMask &&
2159 isOnEncoder(maxViewfinderSize, newStream->width,
2160 newStream->height)) {
2161 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2162 } else {
2163 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2164 }
2165 }
2166 if (isZsl) {
2167 if (zslStream) {
2168 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2169 (int32_t)zslStream->width;
2170 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2171 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002172 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2173 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002174 } else {
2175 LOGE("Error, No ZSL stream identified");
2176 pthread_mutex_unlock(&mMutex);
2177 return -EINVAL;
2178 }
2179 } else if (m_bIs4KVideo) {
2180 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2181 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2182 } else if (bYuv888OverrideJpeg) {
2183 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2184 (int32_t)largeYuv888Size.width;
2185 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2186 (int32_t)largeYuv888Size.height;
2187 }
2188 break;
2189 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2190 case HAL_PIXEL_FORMAT_RAW16:
2191 case HAL_PIXEL_FORMAT_RAW10:
2192 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2193 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2194 isRawStreamRequested = true;
2195 break;
2196 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002197 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002198 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2199 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2200 break;
2201 }
2202 }
2203
2204 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2205 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2206 gCamCapability[mCameraId]->color_arrangement);
2207
2208 if (newStream->priv == NULL) {
2209 //New stream, construct channel
2210 switch (newStream->stream_type) {
2211 case CAMERA3_STREAM_INPUT:
2212 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2213 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2214 break;
2215 case CAMERA3_STREAM_BIDIRECTIONAL:
2216 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2217 GRALLOC_USAGE_HW_CAMERA_WRITE;
2218 break;
2219 case CAMERA3_STREAM_OUTPUT:
2220 /* For video encoding stream, set read/write rarely
2221 * flag so that they may be set to un-cached */
2222 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2223 newStream->usage |=
2224 (GRALLOC_USAGE_SW_READ_RARELY |
2225 GRALLOC_USAGE_SW_WRITE_RARELY |
2226 GRALLOC_USAGE_HW_CAMERA_WRITE);
2227 else if (IS_USAGE_ZSL(newStream->usage))
2228 {
2229 LOGD("ZSL usage flag skipping");
2230 }
2231 else if (newStream == zslStream
2232 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2233 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2234 } else
2235 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2236 break;
2237 default:
2238 LOGE("Invalid stream_type %d", newStream->stream_type);
2239 break;
2240 }
2241
2242 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2243 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2244 QCamera3ProcessingChannel *channel = NULL;
2245 switch (newStream->format) {
2246 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2247 if ((newStream->usage &
2248 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2249 (streamList->operation_mode ==
2250 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2251 ) {
2252 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2253 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002254 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002255 this,
2256 newStream,
2257 (cam_stream_type_t)
2258 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2259 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2260 mMetadataChannel,
2261 0); //heap buffers are not required for HFR video channel
2262 if (channel == NULL) {
2263 LOGE("allocation of channel failed");
2264 pthread_mutex_unlock(&mMutex);
2265 return -ENOMEM;
2266 }
2267 //channel->getNumBuffers() will return 0 here so use
2268 //MAX_INFLIGHT_HFR_REQUESTS
2269 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2270 newStream->priv = channel;
2271 LOGI("num video buffers in HFR mode: %d",
2272 MAX_INFLIGHT_HFR_REQUESTS);
2273 } else {
2274 /* Copy stream contents in HFR preview only case to create
2275 * dummy batch channel so that sensor streaming is in
2276 * HFR mode */
2277 if (!m_bIsVideo && (streamList->operation_mode ==
2278 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2279 mDummyBatchStream = *newStream;
2280 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002281 int bufferCount = MAX_INFLIGHT_REQUESTS;
2282 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2283 CAM_STREAM_TYPE_VIDEO) {
2284 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2285 bufferCount = MAX_VIDEO_BUFFERS;
2286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2288 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002289 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002290 this,
2291 newStream,
2292 (cam_stream_type_t)
2293 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2295 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002296 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002297 if (channel == NULL) {
2298 LOGE("allocation of channel failed");
2299 pthread_mutex_unlock(&mMutex);
2300 return -ENOMEM;
2301 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002302 /* disable UBWC for preview, though supported,
2303 * to take advantage of CPP duplication */
2304 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2305 (previewSize.width == (int32_t)videoWidth)&&
2306 (previewSize.height == (int32_t)videoHeight)){
2307 channel->setUBWCEnabled(false);
2308 }else {
2309 channel->setUBWCEnabled(true);
2310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 newStream->max_buffers = channel->getNumBuffers();
2312 newStream->priv = channel;
2313 }
2314 break;
2315 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2316 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2317 mChannelHandle,
2318 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002319 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 this,
2321 newStream,
2322 (cam_stream_type_t)
2323 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2325 mMetadataChannel);
2326 if (channel == NULL) {
2327 LOGE("allocation of YUV channel failed");
2328 pthread_mutex_unlock(&mMutex);
2329 return -ENOMEM;
2330 }
2331 newStream->max_buffers = channel->getNumBuffers();
2332 newStream->priv = channel;
2333 break;
2334 }
2335 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2336 case HAL_PIXEL_FORMAT_RAW16:
2337 case HAL_PIXEL_FORMAT_RAW10:
2338 mRawChannel = new QCamera3RawChannel(
2339 mCameraHandle->camera_handle, mChannelHandle,
2340 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002341 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002342 this, newStream,
2343 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2344 mMetadataChannel,
2345 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2346 if (mRawChannel == NULL) {
2347 LOGE("allocation of raw channel failed");
2348 pthread_mutex_unlock(&mMutex);
2349 return -ENOMEM;
2350 }
2351 newStream->max_buffers = mRawChannel->getNumBuffers();
2352 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2353 break;
2354 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002355 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2356 mDepthChannel = new QCamera3DepthChannel(
2357 mCameraHandle->camera_handle, mChannelHandle,
2358 mCameraHandle->ops, NULL, NULL, &padding_info,
2359 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2360 mMetadataChannel);
2361 if (NULL == mDepthChannel) {
2362 LOGE("Allocation of depth channel failed");
2363 pthread_mutex_unlock(&mMutex);
2364 return NO_MEMORY;
2365 }
2366 newStream->priv = mDepthChannel;
2367 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2368 } else {
2369 // Max live snapshot inflight buffer is 1. This is to mitigate
2370 // frame drop issues for video snapshot. The more buffers being
2371 // allocated, the more frame drops there are.
2372 mPictureChannel = new QCamera3PicChannel(
2373 mCameraHandle->camera_handle, mChannelHandle,
2374 mCameraHandle->ops, captureResultCb,
2375 setBufferErrorStatus, &padding_info, this, newStream,
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2377 m_bIs4KVideo, isZsl, mMetadataChannel,
2378 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2379 if (mPictureChannel == NULL) {
2380 LOGE("allocation of channel failed");
2381 pthread_mutex_unlock(&mMutex);
2382 return -ENOMEM;
2383 }
2384 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2385 newStream->max_buffers = mPictureChannel->getNumBuffers();
2386 mPictureChannel->overrideYuvSize(
2387 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2388 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002390 break;
2391
2392 default:
2393 LOGE("not a supported format 0x%x", newStream->format);
2394 break;
2395 }
2396 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2397 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2398 } else {
2399 LOGE("Error, Unknown stream type");
2400 pthread_mutex_unlock(&mMutex);
2401 return -EINVAL;
2402 }
2403
2404 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2405 if (channel != NULL && channel->isUBWCEnabled()) {
2406 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002407 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2408 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2410 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2411 }
2412 }
2413
2414 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2415 it != mStreamInfo.end(); it++) {
2416 if ((*it)->stream == newStream) {
2417 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2418 break;
2419 }
2420 }
2421 } else {
2422 // Channel already exists for this stream
2423 // Do nothing for now
2424 }
2425 padding_info = gCamCapability[mCameraId]->padding_info;
2426
Emilian Peev7650c122017-01-19 08:24:33 -08002427 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 * since there is no real stream associated with them
2429 */
Emilian Peev7650c122017-01-19 08:24:33 -08002430 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2431 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002432 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002433 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002434 }
2435
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2437 onlyRaw = false;
2438 }
2439
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002440 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002441 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002442 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002443 cam_analysis_info_t analysisInfo;
2444 int32_t ret = NO_ERROR;
2445 ret = mCommon.getAnalysisInfo(
2446 FALSE,
2447 analysisFeatureMask,
2448 &analysisInfo);
2449 if (ret == NO_ERROR) {
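// For Y-only analysis formats, base the PAAF decision on the Y filter
// arrangement rather than the sensor's native color arrangement.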
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002450 cam_color_filter_arrangement_t analysis_color_arrangement =
2451 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2452 CAM_FILTER_ARRANGEMENT_Y :
2453 gCamCapability[mCameraId]->color_arrangement);
2454 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2455 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002456 cam_dimension_t analysisDim;
2457 analysisDim = mCommon.getMatchingDimension(previewSize,
2458 analysisInfo.analysis_recommended_res);
2459
2460 mAnalysisChannel = new QCamera3SupportChannel(
2461 mCameraHandle->camera_handle,
2462 mChannelHandle,
2463 mCameraHandle->ops,
2464 &analysisInfo.analysis_padding_info,
2465 analysisFeatureMask,
2466 CAM_STREAM_TYPE_ANALYSIS,
2467 &analysisDim,
2468 (analysisInfo.analysis_format
2469 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2470 : CAM_FORMAT_YUV_420_NV21),
2471 analysisInfo.hw_analysis_supported,
2472 gCamCapability[mCameraId]->color_arrangement,
2473 this,
2474 0); // force buffer count to 0
2475 } else {
2476 LOGW("getAnalysisInfo failed, ret = %d", ret);
2477 }
2478 if (!mAnalysisChannel) {
2479 LOGW("Analysis channel cannot be created");
2480 }
2481 }
2482
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 //RAW DUMP channel
2484 if (mEnableRawDump && isRawStreamRequested == false){
2485 cam_dimension_t rawDumpSize;
2486 rawDumpSize = getMaxRawSize(mCameraId);
2487 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2488 setPAAFSupport(rawDumpFeatureMask,
2489 CAM_STREAM_TYPE_RAW,
2490 gCamCapability[mCameraId]->color_arrangement);
2491 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2492 mChannelHandle,
2493 mCameraHandle->ops,
2494 rawDumpSize,
2495 &padding_info,
2496 this, rawDumpFeatureMask);
2497 if (!mRawDumpChannel) {
2498 LOGE("Raw Dump channel cannot be created");
2499 pthread_mutex_unlock(&mMutex);
2500 return -ENOMEM;
2501 }
2502 }
2503
Chien-Yu Chenee335912017-02-09 17:53:20 -08002504 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002505 if (gHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002506 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002507 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2508 "HDR+ RAW source channel is not created.",
2509 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002510 } else {
2511 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2512 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2513 setPAAFSupport(hdrPlusRawFeatureMask,
2514 CAM_STREAM_TYPE_RAW,
2515 gCamCapability[mCameraId]->color_arrangement);
2516 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2517 mChannelHandle,
2518 mCameraHandle->ops,
2519 rawSize,
2520 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002521 this, hdrPlusRawFeatureMask,
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08002522 gHdrPlusClient,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002523 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002524 if (!mHdrPlusRawSrcChannel) {
2525 LOGE("HDR+ Raw Source channel cannot be created");
2526 pthread_mutex_unlock(&mMutex);
2527 return -ENOMEM;
2528 }
2529 }
2530 }
2531
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 if (mAnalysisChannel) {
2533 cam_analysis_info_t analysisInfo;
2534 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2536 CAM_STREAM_TYPE_ANALYSIS;
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002539 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002540 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2541 &analysisInfo);
2542 if (rc != NO_ERROR) {
2543 LOGE("getAnalysisInfo failed, ret = %d", rc);
2544 pthread_mutex_unlock(&mMutex);
2545 return rc;
2546 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002547 cam_color_filter_arrangement_t analysis_color_arrangement =
2548 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2549 CAM_FILTER_ARRANGEMENT_Y :
2550 gCamCapability[mCameraId]->color_arrangement);
2551 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2552 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2553 analysis_color_arrangement);
2554
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002556 mCommon.getMatchingDimension(previewSize,
2557 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002558 mStreamConfigInfo.num_streams++;
2559 }
2560
Thierry Strudel2896d122017-02-23 19:18:03 -08002561 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002562 cam_analysis_info_t supportInfo;
2563 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2564 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2565 setPAAFSupport(callbackFeatureMask,
2566 CAM_STREAM_TYPE_CALLBACK,
2567 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002568 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002569 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002570 if (ret != NO_ERROR) {
2571 /* Ignore the error for Mono camera
2572 * because the PAAF bit mask is only set
2573 * for CAM_STREAM_TYPE_ANALYSIS stream type
2574 */
2575 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2576 LOGW("getAnalysisInfo failed, ret = %d", ret);
2577 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002578 }
2579 mSupportChannel = new QCamera3SupportChannel(
2580 mCameraHandle->camera_handle,
2581 mChannelHandle,
2582 mCameraHandle->ops,
2583 &gCamCapability[mCameraId]->padding_info,
2584 callbackFeatureMask,
2585 CAM_STREAM_TYPE_CALLBACK,
2586 &QCamera3SupportChannel::kDim,
2587 CAM_FORMAT_YUV_420_NV21,
2588 supportInfo.hw_analysis_supported,
2589 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002590 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 if (!mSupportChannel) {
2592 LOGE("dummy channel cannot be created");
2593 pthread_mutex_unlock(&mMutex);
2594 return -ENOMEM;
2595 }
2596 }
2597
2598 if (mSupportChannel) {
2599 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2600 QCamera3SupportChannel::kDim;
2601 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2602 CAM_STREAM_TYPE_CALLBACK;
2603 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2604 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2605 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2606 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2607 gCamCapability[mCameraId]->color_arrangement);
2608 mStreamConfigInfo.num_streams++;
2609 }
2610
2611 if (mRawDumpChannel) {
2612 cam_dimension_t rawSize;
2613 rawSize = getMaxRawSize(mCameraId);
2614 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2615 rawSize;
2616 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2617 CAM_STREAM_TYPE_RAW;
2618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2619 CAM_QCOM_FEATURE_NONE;
2620 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2621 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2622 gCamCapability[mCameraId]->color_arrangement);
2623 mStreamConfigInfo.num_streams++;
2624 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002625
2626 if (mHdrPlusRawSrcChannel) {
2627 cam_dimension_t rawSize;
2628 rawSize = getMaxRawSize(mCameraId);
2629 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2630 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2631 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2632 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2633 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2634 gCamCapability[mCameraId]->color_arrangement);
2635 mStreamConfigInfo.num_streams++;
2636 }
2637
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 /* In HFR mode, if video stream is not added, create a dummy channel so that
2639 * ISP can create a batch mode even for preview only case. This channel is
2640 * never 'start'ed (no stream-on), it is only 'initialized' */
2641 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2642 !m_bIsVideo) {
2643 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2644 setPAAFSupport(dummyFeatureMask,
2645 CAM_STREAM_TYPE_VIDEO,
2646 gCamCapability[mCameraId]->color_arrangement);
2647 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2648 mChannelHandle,
2649 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002650 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002651 this,
2652 &mDummyBatchStream,
2653 CAM_STREAM_TYPE_VIDEO,
2654 dummyFeatureMask,
2655 mMetadataChannel);
2656 if (NULL == mDummyBatchChannel) {
2657 LOGE("creation of mDummyBatchChannel failed."
2658 "Preview will use non-hfr sensor mode ");
2659 }
2660 }
2661 if (mDummyBatchChannel) {
2662 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2663 mDummyBatchStream.width;
2664 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2665 mDummyBatchStream.height;
2666 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2667 CAM_STREAM_TYPE_VIDEO;
2668 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2669 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2670 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2671 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2672 gCamCapability[mCameraId]->color_arrangement);
2673 mStreamConfigInfo.num_streams++;
2674 }
2675
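// Buffer counts recorded in mStreamConfigInfo: 4K video leaves max_buffers
// at 0, EIS 3.0 sessions use the larger video buffer pool, and everything
// else uses the normal in-flight request count.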
2676 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2677 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002678 m_bIs4KVideo ? 0 :
2679 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002680
2681 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2682 for (pendingRequestIterator i = mPendingRequestsList.begin();
2683 i != mPendingRequestsList.end();) {
2684 i = erasePendingRequest(i);
2685 }
2686 mPendingFrameDropList.clear();
2687 // Initialize/Reset the pending buffers list
2688 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2689 req.mPendingBufferList.clear();
2690 }
2691 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2692
Thierry Strudel3d639192016-09-09 11:52:26 -07002693 mCurJpegMeta.clear();
2694 //Get min frame duration for this streams configuration
2695 deriveMinFrameDuration();
2696
Chien-Yu Chenee335912017-02-09 17:53:20 -08002697 mFirstPreviewIntentSeen = false;
2698
2699    // Disable HDR+ if it's enabled.
2700 disableHdrPlusModeLocked();
2701
Thierry Strudel3d639192016-09-09 11:52:26 -07002702 // Update state
2703 mState = CONFIGURED;
2704
2705 pthread_mutex_unlock(&mMutex);
2706
2707 return rc;
2708}
2709
2710/*===========================================================================
2711 * FUNCTION : validateCaptureRequest
2712 *
2713 * DESCRIPTION: validate a capture request from camera service
2714 *
2715 * PARAMETERS :
2716 * @request : request from framework to process
2717 *
2718 * RETURN :
2719 *
2720 *==========================================================================*/
2721int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002722 camera3_capture_request_t *request,
2723 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002724{
2725 ssize_t idx = 0;
2726 const camera3_stream_buffer_t *b;
2727 CameraMetadata meta;
2728
2729 /* Sanity check the request */
2730 if (request == NULL) {
2731 LOGE("NULL capture request");
2732 return BAD_VALUE;
2733 }
2734
2735 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2736 /*settings cannot be null for the first request*/
2737 return BAD_VALUE;
2738 }
2739
2740 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002741 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2742 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002743        LOGE("Request %d: No output buffers provided!",
2744                frameNumber);
2745 return BAD_VALUE;
2746 }
2747 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2748        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2749 request->num_output_buffers, MAX_NUM_STREAMS);
2750 return BAD_VALUE;
2751 }
2752 if (request->input_buffer != NULL) {
2753 b = request->input_buffer;
2754 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2755 LOGE("Request %d: Buffer %ld: Status not OK!",
2756 frameNumber, (long)idx);
2757 return BAD_VALUE;
2758 }
2759 if (b->release_fence != -1) {
2760 LOGE("Request %d: Buffer %ld: Has a release fence!",
2761 frameNumber, (long)idx);
2762 return BAD_VALUE;
2763 }
2764 if (b->buffer == NULL) {
2765 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2766 frameNumber, (long)idx);
2767 return BAD_VALUE;
2768 }
2769 }
2770
2771 // Validate all buffers
2772 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002773 if (b == NULL) {
2774 return BAD_VALUE;
2775 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002776 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 QCamera3ProcessingChannel *channel =
2778 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2779 if (channel == NULL) {
2780 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2781 frameNumber, (long)idx);
2782 return BAD_VALUE;
2783 }
2784 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2785 LOGE("Request %d: Buffer %ld: Status not OK!",
2786 frameNumber, (long)idx);
2787 return BAD_VALUE;
2788 }
2789 if (b->release_fence != -1) {
2790 LOGE("Request %d: Buffer %ld: Has a release fence!",
2791 frameNumber, (long)idx);
2792 return BAD_VALUE;
2793 }
2794 if (b->buffer == NULL) {
2795 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2796 frameNumber, (long)idx);
2797 return BAD_VALUE;
2798 }
2799 if (*(b->buffer) == NULL) {
2800 LOGE("Request %d: Buffer %ld: NULL private handle!",
2801 frameNumber, (long)idx);
2802 return BAD_VALUE;
2803 }
2804 idx++;
2805 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002806 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002807 return NO_ERROR;
2808}
2809
2810/*===========================================================================
2811 * FUNCTION : deriveMinFrameDuration
2812 *
2813 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2814 * on currently configured streams.
2815 *
2816 * PARAMETERS : NONE
2817 *
2818 * RETURN : NONE
2819 *
2820 *==========================================================================*/
2821void QCamera3HardwareInterface::deriveMinFrameDuration()
2822{
2823 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2824
2825 maxJpegDim = 0;
2826 maxProcessedDim = 0;
2827 maxRawDim = 0;
2828
2829 // Figure out maximum jpeg, processed, and raw dimensions
2830 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2831 it != mStreamInfo.end(); it++) {
2832
2833 // Input stream doesn't have valid stream_type
2834 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2835 continue;
2836
2837 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2838 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2839 if (dimension > maxJpegDim)
2840 maxJpegDim = dimension;
2841 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2842 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2843 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2844 if (dimension > maxRawDim)
2845 maxRawDim = dimension;
2846 } else {
2847 if (dimension > maxProcessedDim)
2848 maxProcessedDim = dimension;
2849 }
2850 }
2851
2852 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2853 MAX_SIZES_CNT);
2854
2855 //Assume all jpeg dimensions are in processed dimensions.
2856 if (maxJpegDim > maxProcessedDim)
2857 maxProcessedDim = maxJpegDim;
2858    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
2859 if (maxProcessedDim > maxRawDim) {
2860 maxRawDim = INT32_MAX;
2861
2862 for (size_t i = 0; i < count; i++) {
2863 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2864 gCamCapability[mCameraId]->raw_dim[i].height;
2865 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2866 maxRawDim = dimension;
2867 }
2868 }
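    // Illustrative example (hypothetical sizes): if the largest processed/JPEG
    // stream is 12 MP and the sensor table lists 12 MP and 16 MP RAW modes, the
    // search above picks the 12 MP entry, i.e. the smallest RAW dimension that
    // still covers the largest processed output.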
2869
2870 //Find minimum durations for processed, jpeg, and raw
2871 for (size_t i = 0; i < count; i++) {
2872 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2873 gCamCapability[mCameraId]->raw_dim[i].height) {
2874 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2875 break;
2876 }
2877 }
2878 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2879 for (size_t i = 0; i < count; i++) {
2880 if (maxProcessedDim ==
2881 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2882 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2883 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2884 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2885 break;
2886 }
2887 }
2888}
2889
2890/*===========================================================================
2891 * FUNCTION : getMinFrameDuration
2892 *
2893 * DESCRIPTION: get the minimum frame duration for a request, based on the
2894 *              minimum durations of the currently configured streams.
2895 *
2896 * PARAMETERS : @request: request sent by the framework
2897 *
2898 * RETURN : min frame duration for a particular request
2899 *
2900 *==========================================================================*/
2901int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2902{
2903 bool hasJpegStream = false;
2904 bool hasRawStream = false;
2905 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2906 const camera3_stream_t *stream = request->output_buffers[i].stream;
2907 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2908 hasJpegStream = true;
2909 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2910 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2911 stream->format == HAL_PIXEL_FORMAT_RAW16)
2912 hasRawStream = true;
2913 }
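    // For example, a preview-only request (no BLOB stream) is bounded by the
    // slower of the RAW and processed minimum durations; a request that also
    // has a JPEG output additionally folds in the JPEG minimum duration.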
2914
2915 if (!hasJpegStream)
2916 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2917 else
2918 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2919}
2920
2921/*===========================================================================
2922 * FUNCTION : handleBuffersDuringFlushLock
2923 *
2924 * DESCRIPTION: Account for buffers returned from back-end during flush
2925 * This function is executed while mMutex is held by the caller.
2926 *
2927 * PARAMETERS :
2928 * @buffer: image buffer for the callback
2929 *
2930 * RETURN :
2931 *==========================================================================*/
2932void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2933{
2934 bool buffer_found = false;
2935 for (List<PendingBuffersInRequest>::iterator req =
2936 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2937 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2938 for (List<PendingBufferInfo>::iterator i =
2939 req->mPendingBufferList.begin();
2940 i != req->mPendingBufferList.end(); i++) {
2941 if (i->buffer == buffer->buffer) {
2942 mPendingBuffersMap.numPendingBufsAtFlush--;
2943 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2944 buffer->buffer, req->frame_number,
2945 mPendingBuffersMap.numPendingBufsAtFlush);
2946 buffer_found = true;
2947 break;
2948 }
2949 }
2950 if (buffer_found) {
2951 break;
2952 }
2953 }
2954 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2955 //signal the flush()
2956 LOGD("All buffers returned to HAL. Continue flush");
2957 pthread_cond_signal(&mBuffersCond);
2958 }
2959}
2960
Thierry Strudel3d639192016-09-09 11:52:26 -07002961/*===========================================================================
2962 * FUNCTION : handleBatchMetadata
2963 *
2964 * DESCRIPTION: Handles metadata buffer callback in batch mode
2965 *
2966 * PARAMETERS : @metadata_buf: metadata buffer
2967 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2968 * the meta buf in this method
2969 *
2970 * RETURN :
2971 *
2972 *==========================================================================*/
2973void QCamera3HardwareInterface::handleBatchMetadata(
2974 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2975{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002976 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002977
2978 if (NULL == metadata_buf) {
2979 LOGE("metadata_buf is NULL");
2980 return;
2981 }
2982    /* In batch mode, the metadata will contain the frame number and timestamp of
2983 * the last frame in the batch. Eg: a batch containing buffers from request
2984 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2985     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2986 * multiple process_capture_results */
2987 metadata_buffer_t *metadata =
2988 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2989 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2990 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2991 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2992 uint32_t frame_number = 0, urgent_frame_number = 0;
2993 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2994 bool invalid_metadata = false;
2995 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2996 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002997 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002998
2999 int32_t *p_frame_number_valid =
3000 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3001 uint32_t *p_frame_number =
3002 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3003 int64_t *p_capture_time =
3004 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3005 int32_t *p_urgent_frame_number_valid =
3006 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3007 uint32_t *p_urgent_frame_number =
3008 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3009
3010 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3011 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3012 (NULL == p_urgent_frame_number)) {
3013 LOGE("Invalid metadata");
3014 invalid_metadata = true;
3015 } else {
3016 frame_number_valid = *p_frame_number_valid;
3017 last_frame_number = *p_frame_number;
3018 last_frame_capture_time = *p_capture_time;
3019 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3020 last_urgent_frame_number = *p_urgent_frame_number;
3021 }
3022
3023    /* In batch mode, when no video buffers are requested, set_parms are sent
3024 * for every capture_request. The difference between consecutive urgent
3025 * frame numbers and frame numbers should be used to interpolate the
3026 * corresponding frame numbers and time stamps */
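    /* Example (hypothetical numbers): if this batch metadata reports last frame
     * number 8 and the pending batch map yields first frame number 5, the diff
     * is 4 and the loop below emits results for frames 5, 6, 7 and 8, each with
     * a timestamp interpolated at NSEC_PER_SEC / mHFRVideoFps spacing. */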
3027 pthread_mutex_lock(&mMutex);
3028 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003029 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3030 if(idx < 0) {
3031 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3032 last_urgent_frame_number);
3033 mState = ERROR;
3034 pthread_mutex_unlock(&mMutex);
3035 return;
3036 }
3037 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3039 first_urgent_frame_number;
3040
3041 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3042 urgent_frame_number_valid,
3043 first_urgent_frame_number, last_urgent_frame_number);
3044 }
3045
3046 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003047 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3048 if(idx < 0) {
3049 LOGE("Invalid frame number received: %d. Irrecoverable error",
3050 last_frame_number);
3051 mState = ERROR;
3052 pthread_mutex_unlock(&mMutex);
3053 return;
3054 }
3055 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003056 frameNumDiff = last_frame_number + 1 -
3057 first_frame_number;
3058 mPendingBatchMap.removeItem(last_frame_number);
3059
3060 LOGD("frm: valid: %d frm_num: %d - %d",
3061 frame_number_valid,
3062 first_frame_number, last_frame_number);
3063
3064 }
3065 pthread_mutex_unlock(&mMutex);
3066
3067 if (urgent_frame_number_valid || frame_number_valid) {
3068 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3069 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3070 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3071 urgentFrameNumDiff, last_urgent_frame_number);
3072 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3073 LOGE("frameNumDiff: %d frameNum: %d",
3074 frameNumDiff, last_frame_number);
3075 }
3076
3077 for (size_t i = 0; i < loopCount; i++) {
3078 /* handleMetadataWithLock is called even for invalid_metadata for
3079 * pipeline depth calculation */
3080 if (!invalid_metadata) {
3081 /* Infer frame number. Batch metadata contains frame number of the
3082 * last frame */
3083 if (urgent_frame_number_valid) {
3084 if (i < urgentFrameNumDiff) {
3085 urgent_frame_number =
3086 first_urgent_frame_number + i;
3087 LOGD("inferred urgent frame_number: %d",
3088 urgent_frame_number);
3089 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3090 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3091 } else {
3092 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3093 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3094 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3095 }
3096 }
3097
3098 /* Infer frame number. Batch metadata contains frame number of the
3099 * last frame */
3100 if (frame_number_valid) {
3101 if (i < frameNumDiff) {
3102 frame_number = first_frame_number + i;
3103 LOGD("inferred frame_number: %d", frame_number);
3104 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3105 CAM_INTF_META_FRAME_NUMBER, frame_number);
3106 } else {
3107 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3108 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3109 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3110 }
3111 }
3112
3113 if (last_frame_capture_time) {
3114 //Infer timestamp
3115 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003116 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003117 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003118 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003119 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3120 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3121 LOGD("batch capture_time: %lld, capture_time: %lld",
3122 last_frame_capture_time, capture_time);
3123 }
3124 }
3125 pthread_mutex_lock(&mMutex);
3126 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003127 false /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003128 (i == 0) /* first metadata in the batch metadata */,
3129                (i == 0) /* first metadata in the batch metadata */,
3130                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003130 pthread_mutex_unlock(&mMutex);
3131 }
3132
3133 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003134 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003135 mMetadataChannel->bufDone(metadata_buf);
3136 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003137 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003138 }
3139}
3140
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003141void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3142 camera3_error_msg_code_t errorCode)
3143{
3144 camera3_notify_msg_t notify_msg;
3145 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3146 notify_msg.type = CAMERA3_MSG_ERROR;
3147 notify_msg.message.error.error_code = errorCode;
3148 notify_msg.message.error.error_stream = NULL;
3149 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003150 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003151
3152 return;
3153}
Thierry Strudel3d639192016-09-09 11:52:26 -07003154/*===========================================================================
3155 * FUNCTION : handleMetadataWithLock
3156 *
3157 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3158 *
3159 * PARAMETERS : @metadata_buf: metadata buffer
3160 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3161 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003162 * @firstMetadataInBatch: Boolean to indicate whether this is the
3163 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003164 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3165 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003166 *
3167 * RETURN :
3168 *
3169 *==========================================================================*/
3170void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003171 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003172 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003173{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003174 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003175 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3176 //during flush do not send metadata from this thread
3177 LOGD("not sending metadata during flush or when mState is error");
3178 if (free_and_bufdone_meta_buf) {
3179 mMetadataChannel->bufDone(metadata_buf);
3180 free(metadata_buf);
3181 }
3182 return;
3183 }
3184
3185 //not in flush
3186 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3187 int32_t frame_number_valid, urgent_frame_number_valid;
3188 uint32_t frame_number, urgent_frame_number;
3189 int64_t capture_time;
3190 nsecs_t currentSysTime;
3191
3192 int32_t *p_frame_number_valid =
3193 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3194 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3195 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3196 int32_t *p_urgent_frame_number_valid =
3197 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3198 uint32_t *p_urgent_frame_number =
3199 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3200 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3201 metadata) {
3202 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3203 *p_frame_number_valid, *p_frame_number);
3204 }
3205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003206 camera_metadata_t *resultMetadata = nullptr;
3207
Thierry Strudel3d639192016-09-09 11:52:26 -07003208 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3209 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3210 LOGE("Invalid metadata");
3211 if (free_and_bufdone_meta_buf) {
3212 mMetadataChannel->bufDone(metadata_buf);
3213 free(metadata_buf);
3214 }
3215 goto done_metadata;
3216 }
3217 frame_number_valid = *p_frame_number_valid;
3218 frame_number = *p_frame_number;
3219 capture_time = *p_capture_time;
3220 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3221 urgent_frame_number = *p_urgent_frame_number;
3222 currentSysTime = systemTime(CLOCK_MONOTONIC);
3223
3224 // Detect if buffers from any requests are overdue
3225 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003226 int64_t timeout;
3227 {
3228 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3229 // If there is a pending HDR+ request, the following requests may be blocked until the
3230 // HDR+ request is done. So allow a longer timeout.
3231 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3232 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3233 }
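        // currentSysTime and req.timestamp are in nanoseconds (CLOCK_MONOTONIC),
        // while the timeout constants are in seconds, hence the s2ns() conversion.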
3234
3235 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003236 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003237 assert(missed.stream->priv);
3238 if (missed.stream->priv) {
3239 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3240 assert(ch->mStreams[0]);
3241 if (ch->mStreams[0]) {
3242 LOGE("Cancel missing frame = %d, buffer = %p,"
3243                            " stream type = %d, stream format = %d",
3244 req.frame_number, missed.buffer,
3245 ch->mStreams[0]->getMyType(), missed.stream->format);
3246 ch->timeoutFrame(req.frame_number);
3247 }
3248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003249 }
3250 }
3251 }
3252 //Partial result on process_capture_result for timestamp
3253 if (urgent_frame_number_valid) {
3254 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3255 urgent_frame_number, capture_time);
3256
3257            //Received an urgent Frame Number, handle it
3258 //using partial results
3259 for (pendingRequestIterator i =
3260 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3261 LOGD("Iterator Frame = %d urgent frame = %d",
3262 i->frame_number, urgent_frame_number);
3263
3264 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3265 (i->partial_result_cnt == 0)) {
3266 LOGE("Error: HAL missed urgent metadata for frame number %d",
3267 i->frame_number);
3268 }
3269
3270 if (i->frame_number == urgent_frame_number &&
3271 i->bUrgentReceived == 0) {
3272
3273 camera3_capture_result_t result;
3274 memset(&result, 0, sizeof(camera3_capture_result_t));
3275
3276 i->partial_result_cnt++;
3277 i->bUrgentReceived = 1;
3278 // Extract 3A metadata
3279 result.result =
3280 translateCbUrgentMetadataToResultMetadata(metadata);
3281 // Populate metadata result
3282 result.frame_number = urgent_frame_number;
3283 result.num_output_buffers = 0;
3284 result.output_buffers = NULL;
3285 result.partial_result = i->partial_result_cnt;
3286
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003287 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003288 // Notify HDR+ client about the partial metadata.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003289 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003290 result.partial_result == PARTIAL_RESULT_COUNT);
3291 }
3292
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003293 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003294 LOGD("urgent frame_number = %u, capture_time = %lld",
3295 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003296 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3297 // Instant AEC settled for this frame.
3298 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3299 mInstantAECSettledFrameNumber = urgent_frame_number;
3300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003301 free_camera_metadata((camera_metadata_t *)result.result);
3302 break;
3303 }
3304 }
3305 }
3306
3307 if (!frame_number_valid) {
3308 LOGD("Not a valid normal frame number, used as SOF only");
3309 if (free_and_bufdone_meta_buf) {
3310 mMetadataChannel->bufDone(metadata_buf);
3311 free(metadata_buf);
3312 }
3313 goto done_metadata;
3314 }
3315 LOGH("valid frame_number = %u, capture_time = %lld",
3316 frame_number, capture_time);
3317
Emilian Peev7650c122017-01-19 08:24:33 -08003318 if (metadata->is_depth_data_valid) {
3319 handleDepthDataLocked(metadata->depth_data, frame_number);
3320 }
3321
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003322    // Check whether any stream buffer corresponding to this frame is dropped or not.
3323    // If dropped, then send ERROR_BUFFER for the corresponding stream.
3324    // OR, if instant AEC is enabled, drop frames until AEC has settled.
3325 for (auto & pendingRequest : mPendingRequestsList) {
3326 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3327 mInstantAECSettledFrameNumber)) {
3328 camera3_notify_msg_t notify_msg = {};
3329 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003330 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003331 QCamera3ProcessingChannel *channel =
3332 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003333 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003334 if (p_cam_frame_drop) {
3335 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003336 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003337 // Got the stream ID for drop frame.
3338 dropFrame = true;
3339 break;
3340 }
3341 }
3342 } else {
3343 // This is instant AEC case.
3344 // For instant AEC drop the stream untill AEC is settled.
3345 dropFrame = true;
3346 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003347
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003348 if (dropFrame) {
3349 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3350 if (p_cam_frame_drop) {
3351 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003352 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003353 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003354 } else {
3355 // For instant AEC, inform frame drop and frame number
3356 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3357 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003358 pendingRequest.frame_number, streamID,
3359 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003360 }
3361 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003362 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003363 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003364 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003365 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003366 if (p_cam_frame_drop) {
3367 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003368 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003369 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003370 } else {
3371 // For instant AEC, inform frame drop and frame number
3372 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3373 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003374 pendingRequest.frame_number, streamID,
3375 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003376 }
3377 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003378 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003379 PendingFrameDrop.stream_ID = streamID;
3380 // Add the Frame drop info to mPendingFrameDropList
3381 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003383 }
3384 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003385 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003386
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 for (auto & pendingRequest : mPendingRequestsList) {
3388 // Find the pending request with the frame number.
3389 if (pendingRequest.frame_number == frame_number) {
3390 // Update the sensor timestamp.
3391 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003392
Thierry Strudel3d639192016-09-09 11:52:26 -07003393
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003394            /* Set the timestamp in display metadata so that clients aware of
3395               private_handle, such as VT, can use these unmodified timestamps.
3396               The camera framework is unaware of this timestamp and cannot change it. */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003397 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003398
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 // Find channel requiring metadata, meaning internal offline postprocess
3400 // is needed.
3401 //TODO: for now, we don't support two streams requiring metadata at the same time.
3402            // (because we are not making copies, and the metadata buffer is not reference counted.)
3403 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003404 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3405 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003406 if (iter->need_metadata) {
3407 internalPproc = true;
3408 QCamera3ProcessingChannel *channel =
3409 (QCamera3ProcessingChannel *)iter->stream->priv;
3410 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003411 if(p_is_metabuf_queued != NULL) {
3412 *p_is_metabuf_queued = true;
3413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003414 break;
3415 }
3416 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003417 for (auto itr = pendingRequest.internalRequestList.begin();
3418 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003419 if (itr->need_metadata) {
3420 internalPproc = true;
3421 QCamera3ProcessingChannel *channel =
3422 (QCamera3ProcessingChannel *)itr->stream->priv;
3423 channel->queueReprocMetadata(metadata_buf);
3424 break;
3425 }
3426 }
3427
Thierry Strudel54dc9782017-02-15 12:12:10 -08003428 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003429 resultMetadata = translateFromHalMetadata(metadata,
3430 pendingRequest.timestamp, pendingRequest.request_id,
3431 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3432 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003433 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003434 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003435 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003436 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003437 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003438 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003439
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003440 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003442 if (pendingRequest.blob_request) {
3443 //Dump tuning metadata if enabled and available
3444 char prop[PROPERTY_VALUE_MAX];
3445 memset(prop, 0, sizeof(prop));
3446 property_get("persist.camera.dumpmetadata", prop, "0");
3447 int32_t enabled = atoi(prop);
3448 if (enabled && metadata->is_tuning_params_valid) {
3449 dumpMetadataToFile(metadata->tuning_params,
3450 mMetaFrameCount,
3451 enabled,
3452 "Snapshot",
3453 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003454 }
3455 }
3456
3457 if (!internalPproc) {
3458 LOGD("couldn't find need_metadata for this metadata");
3459 // Return metadata buffer
3460 if (free_and_bufdone_meta_buf) {
3461 mMetadataChannel->bufDone(metadata_buf);
3462 free(metadata_buf);
3463 }
3464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003465
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003466 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 }
3468 }
3469
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003470 // Try to send out shutter callbacks and capture results.
3471 handlePendingResultsWithLock(frame_number, resultMetadata);
3472 return;
3473
Thierry Strudel3d639192016-09-09 11:52:26 -07003474done_metadata:
3475 for (pendingRequestIterator i = mPendingRequestsList.begin();
3476 i != mPendingRequestsList.end() ;i++) {
3477 i->pipeline_depth++;
3478 }
3479 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3480 unblockRequestIfNecessary();
3481}
3482
3483/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003484 * FUNCTION : handleDepthDataLocked
3485 *
3486 * DESCRIPTION: Handles incoming depth data
3487 *
3488 * PARAMETERS : @depthData : Depth data
3489 * @frameNumber: Frame number of the incoming depth data
3490 *
3491 * RETURN :
3492 *
3493 *==========================================================================*/
3494void QCamera3HardwareInterface::handleDepthDataLocked(
3495 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3496 uint32_t currentFrameNumber;
3497 buffer_handle_t *depthBuffer;
3498
3499 if (nullptr == mDepthChannel) {
3500 LOGE("Depth channel not present!");
3501 return;
3502 }
3503
3504 camera3_stream_buffer_t resultBuffer =
3505 {.acquire_fence = -1,
3506 .release_fence = -1,
3507 .status = CAMERA3_BUFFER_STATUS_OK,
3508 .buffer = nullptr,
3509 .stream = mDepthChannel->getStream()};
3510 camera3_capture_result_t result =
3511 {.result = nullptr,
3512 .num_output_buffers = 1,
3513 .output_buffers = &resultBuffer,
3514 .partial_result = 0,
3515 .frame_number = 0};
3516
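    // Drain queued depth buffers in frame-number order: anything older than the
    // incoming frame is returned with ERROR status (its depth data never arrived),
    // the matching frame is populated with the incoming depth data, and newer
    // buffers are left queued for later.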
3517 do {
3518 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3519 if (nullptr == depthBuffer) {
3520 break;
3521 }
3522
3523 result.frame_number = currentFrameNumber;
3524 resultBuffer.buffer = depthBuffer;
3525 if (currentFrameNumber == frameNumber) {
3526 int32_t rc = mDepthChannel->populateDepthData(depthData,
3527 frameNumber);
3528 if (NO_ERROR != rc) {
3529 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3530 } else {
3531 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3532 }
3533 } else if (currentFrameNumber > frameNumber) {
3534 break;
3535 } else {
3536 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3537 {{currentFrameNumber, mDepthChannel->getStream(),
3538 CAMERA3_MSG_ERROR_BUFFER}}};
3539 orchestrateNotify(&notify_msg);
3540
3541            LOGE("Depth buffer for frame number: %d is missing, "
3542                    "returning it with error status!", currentFrameNumber);
3543 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3544 }
3545 mDepthChannel->unmapBuffer(currentFrameNumber);
3546
3547 orchestrateResult(&result);
3548 } while (currentFrameNumber < frameNumber);
3549}
3550
3551/*===========================================================================
3552 * FUNCTION : notifyErrorFoPendingDepthData
3553 *
3554 * DESCRIPTION: Returns error for any pending depth buffers
3555 *
3556 * PARAMETERS : depthCh - depth channel that needs to get flushed
3557 *
3558 * RETURN :
3559 *
3560 *==========================================================================*/
3561void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3562 QCamera3DepthChannel *depthCh) {
3563 uint32_t currentFrameNumber;
3564 buffer_handle_t *depthBuffer;
3565
3566 if (nullptr == depthCh) {
3567 return;
3568 }
3569
3570 camera3_notify_msg_t notify_msg =
3571 {.type = CAMERA3_MSG_ERROR,
3572 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3573 camera3_stream_buffer_t resultBuffer =
3574 {.acquire_fence = -1,
3575 .release_fence = -1,
3576 .buffer = nullptr,
3577 .stream = depthCh->getStream(),
3578 .status = CAMERA3_BUFFER_STATUS_ERROR};
3579 camera3_capture_result_t result =
3580 {.result = nullptr,
3581 .frame_number = 0,
3582 .num_output_buffers = 1,
3583 .partial_result = 0,
3584 .output_buffers = &resultBuffer};
3585
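    // Flush path: every depth buffer still queued in the channel is unmapped,
    // reported through an ERROR_BUFFER notify, and returned with ERROR status.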
3586 while (nullptr !=
3587 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3588 depthCh->unmapBuffer(currentFrameNumber);
3589
3590 notify_msg.message.error.frame_number = currentFrameNumber;
3591 orchestrateNotify(&notify_msg);
3592
3593 resultBuffer.buffer = depthBuffer;
3594 result.frame_number = currentFrameNumber;
3595 orchestrateResult(&result);
3596 };
3597}
3598
3599/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 * FUNCTION : hdrPlusPerfLock
3601 *
3602 * DESCRIPTION: perf lock for HDR+ using custom intent
3603 *
3604 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3605 *
3606 * RETURN : None
3607 *
3608 *==========================================================================*/
3609void QCamera3HardwareInterface::hdrPlusPerfLock(
3610 mm_camera_super_buf_t *metadata_buf)
3611{
3612 if (NULL == metadata_buf) {
3613 LOGE("metadata_buf is NULL");
3614 return;
3615 }
3616 metadata_buffer_t *metadata =
3617 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3618 int32_t *p_frame_number_valid =
3619 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3620 uint32_t *p_frame_number =
3621 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3622
3623 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3624 LOGE("%s: Invalid metadata", __func__);
3625 return;
3626 }
3627
3628    //acquire perf lock for HDR_PLUS_PERF_TIME_OUT after the last HDR frame is captured
3629 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3630 if ((p_frame_number != NULL) &&
3631 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003632 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 }
3634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003635}
3636
3637/*===========================================================================
3638 * FUNCTION : handleInputBufferWithLock
3639 *
3640 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3641 *
3642 * PARAMETERS : @frame_number: frame number of the input buffer
3643 *
3644 * RETURN :
3645 *
3646 *==========================================================================*/
3647void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3648{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003649 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 pendingRequestIterator i = mPendingRequestsList.begin();
3651 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3652 i++;
3653 }
3654 if (i != mPendingRequestsList.end() && i->input_buffer) {
3655 //found the right request
3656 if (!i->shutter_notified) {
3657 CameraMetadata settings;
3658 camera3_notify_msg_t notify_msg;
3659 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3660 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3661 if(i->settings) {
3662 settings = i->settings;
3663 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3664 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3665 } else {
3666 LOGE("No timestamp in input settings! Using current one.");
3667 }
3668 } else {
3669 LOGE("Input settings missing!");
3670 }
3671
3672 notify_msg.type = CAMERA3_MSG_SHUTTER;
3673 notify_msg.message.shutter.frame_number = frame_number;
3674 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003675 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003676 i->shutter_notified = true;
3677 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3678 i->frame_number, notify_msg.message.shutter.timestamp);
3679 }
3680
3681 if (i->input_buffer->release_fence != -1) {
3682 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3683 close(i->input_buffer->release_fence);
3684 if (rc != OK) {
3685 LOGE("input buffer sync wait failed %d", rc);
3686 }
3687 }
3688
3689 camera3_capture_result result;
3690 memset(&result, 0, sizeof(camera3_capture_result));
3691 result.frame_number = frame_number;
3692 result.result = i->settings;
3693 result.input_buffer = i->input_buffer;
3694 result.partial_result = PARTIAL_RESULT_COUNT;
3695
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003696 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 LOGD("Input request metadata and input buffer frame_number = %u",
3698 i->frame_number);
3699 i = erasePendingRequest(i);
3700 } else {
3701 LOGE("Could not find input request for frame number %d", frame_number);
3702 }
3703}
3704
3705/*===========================================================================
3706 * FUNCTION : handleBufferWithLock
3707 *
3708 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3709 *
3710 * PARAMETERS : @buffer: image buffer for the callback
3711 * @frame_number: frame number of the image buffer
3712 *
3713 * RETURN :
3714 *
3715 *==========================================================================*/
3716void QCamera3HardwareInterface::handleBufferWithLock(
3717 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3718{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003719 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003720
3721 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3722 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 /* Nothing to be done during error state */
3726 if ((ERROR == mState) || (DEINIT == mState)) {
3727 return;
3728 }
3729 if (mFlushPerf) {
3730 handleBuffersDuringFlushLock(buffer);
3731 return;
3732 }
3733 //not in flush
3734 // If the frame number doesn't exist in the pending request list,
3735 // directly send the buffer to the frameworks, and update pending buffers map
3736 // Otherwise, book-keep the buffer.
3737 pendingRequestIterator i = mPendingRequestsList.begin();
3738 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3739 i++;
3740 }
3741 if (i == mPendingRequestsList.end()) {
3742 // Verify all pending requests frame_numbers are greater
3743 for (pendingRequestIterator j = mPendingRequestsList.begin();
3744 j != mPendingRequestsList.end(); j++) {
3745 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3746 LOGW("Error: pending live frame number %d is smaller than %d",
3747 j->frame_number, frame_number);
3748 }
3749 }
3750 camera3_capture_result_t result;
3751 memset(&result, 0, sizeof(camera3_capture_result_t));
3752 result.result = NULL;
3753 result.frame_number = frame_number;
3754 result.num_output_buffers = 1;
3755 result.partial_result = 0;
3756 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3757 m != mPendingFrameDropList.end(); m++) {
3758 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3759 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3760 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3761 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3762 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3763 frame_number, streamID);
3764 m = mPendingFrameDropList.erase(m);
3765 break;
3766 }
3767 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003768 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003769 result.output_buffers = buffer;
3770 LOGH("result frame_number = %d, buffer = %p",
3771 frame_number, buffer->buffer);
3772
3773 mPendingBuffersMap.removeBuf(buffer->buffer);
3774
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003775 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003776 } else {
3777 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003778 if (i->input_buffer->release_fence != -1) {
3779 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3780 close(i->input_buffer->release_fence);
3781 if (rc != OK) {
3782 LOGE("input buffer sync wait failed %d", rc);
3783 }
3784 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003786
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003787 // Put buffer into the pending request
3788 for (auto &requestedBuffer : i->buffers) {
3789 if (requestedBuffer.stream == buffer->stream) {
3790 if (requestedBuffer.buffer != nullptr) {
3791 LOGE("Error: buffer is already set");
3792 } else {
3793 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3794 sizeof(camera3_stream_buffer_t));
3795 *(requestedBuffer.buffer) = *buffer;
3796 LOGH("cache buffer %p at result frame_number %u",
3797 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003798 }
3799 }
3800 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801
3802 if (i->input_buffer) {
3803 // For a reprocessing request, try to send out shutter callback and result metadata.
3804 handlePendingResultsWithLock(frame_number, nullptr);
3805 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003806 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003807
3808 if (mPreviewStarted == false) {
3809 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3810 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3811 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3812 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3813 mPreviewStarted = true;
3814
3815 // Set power hint for preview
3816 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3817 }
3818 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003819}
3820
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3822 const camera_metadata_t *resultMetadata)
3823{
3824 // Find the pending request for this result metadata.
3825 auto requestIter = mPendingRequestsList.begin();
3826 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3827 requestIter++;
3828 }
3829
3830 if (requestIter == mPendingRequestsList.end()) {
3831 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3832 return;
3833 }
3834
3835 // Update the result metadata
3836 requestIter->resultMetadata = resultMetadata;
3837
3838 // Check what type of request this is.
3839 bool liveRequest = false;
3840 if (requestIter->hdrplus) {
3841 // HDR+ request doesn't have partial results.
3842 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3843 } else if (requestIter->input_buffer != nullptr) {
3844 // Reprocessing request result is the same as settings.
3845 requestIter->resultMetadata = requestIter->settings;
3846 // Reprocessing request doesn't have partial results.
3847 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3848 } else {
3849 liveRequest = true;
3850 requestIter->partial_result_cnt++;
3851 mPendingLiveRequest--;
3852
3853 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08003854 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3855 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003856 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3857 }
3858 }
3859
3860 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3861 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3862 bool readyToSend = true;
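    // Example: if frames 10, 11 and 12 are pending and frame 11's metadata
    // arrives before frame 10's, frames 11 and 12 are held here until frame 10
    // has been sent, keeping shutter callbacks and results in order.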
3863
3864 // Iterate through the pending requests to send out shutter callbacks and results that are
3865 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3866 // live requests that don't have result metadata yet.
3867 auto iter = mPendingRequestsList.begin();
3868 while (iter != mPendingRequestsList.end()) {
3869 // Check if current pending request is ready. If it's not ready, the following pending
3870 // requests are also not ready.
3871 if (readyToSend && iter->resultMetadata == nullptr) {
3872 readyToSend = false;
3873 }
3874
3875 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3876
3877 std::vector<camera3_stream_buffer_t> outputBuffers;
3878
3879 camera3_capture_result_t result = {};
3880 result.frame_number = iter->frame_number;
3881 result.result = iter->resultMetadata;
3882 result.partial_result = iter->partial_result_cnt;
3883
3884 // If this pending buffer has result metadata, we may be able to send out shutter callback
3885 // and result metadata.
3886 if (iter->resultMetadata != nullptr) {
3887 if (!readyToSend) {
3888 // If any of the previous pending request is not ready, this pending request is
3889 // also not ready to send in order to keep shutter callbacks and result metadata
3890 // in order.
3891 iter++;
3892 continue;
3893 }
3894
3895 // Invoke shutter callback if not yet.
3896 if (!iter->shutter_notified) {
3897 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3898
3899 // Find the timestamp in HDR+ result metadata
3900 camera_metadata_ro_entry_t entry;
3901 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3902 ANDROID_SENSOR_TIMESTAMP, &entry);
3903 if (res != OK) {
3904 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3905 __FUNCTION__, iter->frame_number, strerror(-res), res);
3906 } else {
3907 timestamp = entry.data.i64[0];
3908 }
3909
3910 camera3_notify_msg_t notify_msg = {};
3911 notify_msg.type = CAMERA3_MSG_SHUTTER;
3912 notify_msg.message.shutter.frame_number = iter->frame_number;
3913 notify_msg.message.shutter.timestamp = timestamp;
3914 orchestrateNotify(&notify_msg);
3915 iter->shutter_notified = true;
3916 }
3917
3918 result.input_buffer = iter->input_buffer;
3919
3920 // Prepare output buffer array
3921 for (auto bufferInfoIter = iter->buffers.begin();
3922 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3923 if (bufferInfoIter->buffer != nullptr) {
3924
3925 QCamera3Channel *channel =
3926 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3927 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3928
3929 // Check if this buffer is a dropped frame.
3930 auto frameDropIter = mPendingFrameDropList.begin();
3931 while (frameDropIter != mPendingFrameDropList.end()) {
3932 if((frameDropIter->stream_ID == streamID) &&
3933 (frameDropIter->frame_number == frameNumber)) {
3934 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3935 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3936 streamID);
3937 mPendingFrameDropList.erase(frameDropIter);
3938 break;
3939 } else {
3940 frameDropIter++;
3941 }
3942 }
3943
3944 // Check buffer error status
3945 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3946 bufferInfoIter->buffer->buffer);
3947 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3948
3949 outputBuffers.push_back(*(bufferInfoIter->buffer));
3950 free(bufferInfoIter->buffer);
3951 bufferInfoIter->buffer = NULL;
3952 }
3953 }
3954
3955 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3956 result.num_output_buffers = outputBuffers.size();
3957 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3958 // If the result metadata belongs to a live request, notify errors for previous pending
3959 // live requests.
3960 mPendingLiveRequest--;
3961
3962 CameraMetadata dummyMetadata;
3963 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3964 result.result = dummyMetadata.release();
3965
3966 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3967 } else {
3968 iter++;
3969 continue;
3970 }
3971
3972 orchestrateResult(&result);
3973
3974 // For reprocessing, result metadata is the same as settings so do not free it here to
3975 // avoid double free.
3976 if (result.result != iter->settings) {
3977 free_camera_metadata((camera_metadata_t *)result.result);
3978 }
3979 iter->resultMetadata = nullptr;
3980 iter = erasePendingRequest(iter);
3981 }
3982
3983 if (liveRequest) {
3984 for (auto &iter : mPendingRequestsList) {
3985 // Increment pipeline depth for the following pending requests.
3986 if (iter.frame_number > frameNumber) {
3987 iter.pipeline_depth++;
3988 }
3989 }
3990 }
3991
3992 unblockRequestIfNecessary();
3993}
3994
Thierry Strudel3d639192016-09-09 11:52:26 -07003995/*===========================================================================
3996 * FUNCTION : unblockRequestIfNecessary
3997 *
3998 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3999 * that mMutex is held when this function is called.
4000 *
4001 * PARAMETERS :
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::unblockRequestIfNecessary()
4007{
4008 // Unblock process_capture_request
4009 pthread_cond_signal(&mRequestCond);
4010}
4011
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004012/*===========================================================================
4013 * FUNCTION : isHdrSnapshotRequest
4014 *
4015 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4016 *
4017 * PARAMETERS : camera3 request structure
4018 *
4019 * RETURN : boolean decision variable
4020 *
4021 *==========================================================================*/
4022bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4023{
4024 if (request == NULL) {
4025 LOGE("Invalid request handle");
4026 assert(0);
4027 return false;
4028 }
4029
4030 if (!mForceHdrSnapshot) {
4031 CameraMetadata frame_settings;
4032 frame_settings = request->settings;
4033
4034 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4035 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4036 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4037 return false;
4038 }
4039 } else {
4040 return false;
4041 }
4042
4043 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4044 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4045 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4046 return false;
4047 }
4048 } else {
4049 return false;
4050 }
4051 }
4052
4053 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4054 if (request->output_buffers[i].stream->format
4055 == HAL_PIXEL_FORMAT_BLOB) {
4056 return true;
4057 }
4058 }
4059
4060 return false;
4061}
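/* Illustrative sketch (not part of the build): a request that this predicate
 * accepts carries the HDR scene mode plus at least one BLOB (JPEG) output.
 * Hypothetical framework-side construction, assuming a CameraMetadata settings
 * object and an already-configured JPEG stream:
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // request->settings = settings.getAndLock();
 *   // request->output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB
 */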
4062/*===========================================================================
4063 * FUNCTION : orchestrateRequest
4064 *
4065 * DESCRIPTION: Orchestrates a capture request from camera service
4066 *
4067 * PARAMETERS :
4068 * @request : request from framework to process
4069 *
4070 * RETURN : Error status codes
4071 *
4072 *==========================================================================*/
4073int32_t QCamera3HardwareInterface::orchestrateRequest(
4074 camera3_capture_request_t *request)
4075{
4076
4077 uint32_t originalFrameNumber = request->frame_number;
4078 uint32_t originalOutputCount = request->num_output_buffers;
4079 const camera_metadata_t *original_settings = request->settings;
4080 List<InternalRequest> internallyRequestedStreams;
4081 List<InternalRequest> emptyInternalList;
4082
4083 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4084 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4085 uint32_t internalFrameNumber;
4086 CameraMetadata modified_meta;
4087
4088
4089 /* Add Blob channel to list of internally requested streams */
4090 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4091 if (request->output_buffers[i].stream->format
4092 == HAL_PIXEL_FORMAT_BLOB) {
4093 InternalRequest streamRequested;
4094 streamRequested.meteringOnly = 1;
4095 streamRequested.need_metadata = 0;
4096 streamRequested.stream = request->output_buffers[i].stream;
4097 internallyRequestedStreams.push_back(streamRequested);
4098 }
4099 }
4100 request->num_output_buffers = 0;
4101 auto itr = internallyRequestedStreams.begin();
4102
4103 /* Modify setting to set compensation */
4104 modified_meta = request->settings;
4105 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4106 uint8_t aeLock = 1;
4107 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4108 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4109 camera_metadata_t *modified_settings = modified_meta.release();
4110 request->settings = modified_settings;
4111
4112 /* Capture Settling & -2x frame */
4113 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4114 request->frame_number = internalFrameNumber;
4115 processCaptureRequest(request, internallyRequestedStreams);
4116
4117 request->num_output_buffers = originalOutputCount;
4118 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4119 request->frame_number = internalFrameNumber;
4120 processCaptureRequest(request, emptyInternalList);
4121 request->num_output_buffers = 0;
4122
4123 modified_meta = modified_settings;
4124 expCompensation = 0;
4125 aeLock = 1;
4126 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4127 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4128 modified_settings = modified_meta.release();
4129 request->settings = modified_settings;
4130
4131 /* Capture Settling & 0X frame */
4132
4133 itr = internallyRequestedStreams.begin();
4134 if (itr == internallyRequestedStreams.end()) {
4135 LOGE("Error Internally Requested Stream list is empty");
4136 assert(0);
4137 } else {
4138 itr->need_metadata = 0;
4139 itr->meteringOnly = 1;
4140 }
4141
4142 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4143 request->frame_number = internalFrameNumber;
4144 processCaptureRequest(request, internallyRequestedStreams);
4145
4146 itr = internallyRequestedStreams.begin();
4147 if (itr == internallyRequestedStreams.end()) {
4148 ALOGE("Error Internally Requested Stream list is empty");
4149 assert(0);
4150 } else {
4151 itr->need_metadata = 1;
4152 itr->meteringOnly = 0;
4153 }
4154
4155 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4156 request->frame_number = internalFrameNumber;
4157 processCaptureRequest(request, internallyRequestedStreams);
4158
4159 /* Capture 2X frame*/
4160 modified_meta = modified_settings;
4161 expCompensation = GB_HDR_2X_STEP_EV;
4162 aeLock = 1;
4163 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4164 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4165 modified_settings = modified_meta.release();
4166 request->settings = modified_settings;
4167
4168 itr = internallyRequestedStreams.begin();
4169 if (itr == internallyRequestedStreams.end()) {
4170 ALOGE("Error Internally Requested Stream list is empty");
4171 assert(0);
4172 } else {
4173 itr->need_metadata = 0;
4174 itr->meteringOnly = 1;
4175 }
4176 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4177 request->frame_number = internalFrameNumber;
4178 processCaptureRequest(request, internallyRequestedStreams);
4179
4180 itr = internallyRequestedStreams.begin();
4181 if (itr == internallyRequestedStreams.end()) {
4182 ALOGE("Error Internally Requested Stream list is empty");
4183 assert(0);
4184 } else {
4185 itr->need_metadata = 1;
4186 itr->meteringOnly = 0;
4187 }
4188
4189 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4190 request->frame_number = internalFrameNumber;
4191 processCaptureRequest(request, internallyRequestedStreams);
4192
4193
4194 /* Capture 2X on original streaming config*/
4195 internallyRequestedStreams.clear();
4196
4197 /* Restore original settings pointer */
4198 request->settings = original_settings;
4199 } else {
4200 uint32_t internalFrameNumber;
4201 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4202 request->frame_number = internalFrameNumber;
4203 return processCaptureRequest(request, internallyRequestedStreams);
4204 }
4205
4206 return NO_ERROR;
4207}
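/* Summary of the HDR bracketing sequence issued above: one framework request is
 * fanned out into several internal requests (internal frame numbers come from
 * _orchestrationDb), with AE locked throughout:
 *   1. settling frame on the internal BLOB stream at GB_HDR_HALF_STEP_EV (metering only)
 *   2. capture on the original framework streams at the same compensation
 *      (the only request mapped back to the original framework frame number)
 *   3. settling frame at 0 EV (metering only), then a 0 EV capture with metadata
 *   4. settling frame at GB_HDR_2X_STEP_EV (metering only), then a capture with metadata
 * Results and notifies for the purely internal requests are dropped later in
 * orchestrateResult()/orchestrateNotify().
 */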
4208
4209/*===========================================================================
4210 * FUNCTION : orchestrateResult
4211 *
4212 * DESCRIPTION: Orchestrates a capture result to camera service
4213 *
4214 * PARAMETERS :
4215 * @result : capture result to send to the framework
4216 *
4217 * RETURN :
4218 *
4219 *==========================================================================*/
4220void QCamera3HardwareInterface::orchestrateResult(
4221 camera3_capture_result_t *result)
4222{
4223 uint32_t frameworkFrameNumber;
4224 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4225 frameworkFrameNumber);
4226 if (rc != NO_ERROR) {
4227 LOGE("Cannot find translated frameworkFrameNumber");
4228 assert(0);
4229 } else {
4230 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004231 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004232 } else {
4233 result->frame_number = frameworkFrameNumber;
4234 mCallbackOps->process_capture_result(mCallbackOps, result);
4235 }
4236 }
4237}
4238
4239/*===========================================================================
4240 * FUNCTION : orchestrateNotify
4241 *
4242 * DESCRIPTION: Orchestrates a notify to camera service
4243 *
4244 * PARAMETERS :
4245 * @notify_msg : notify message to send to the framework
4246 *
4247 * RETURN :
4248 *
4249 *==========================================================================*/
4250void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4251{
4252 uint32_t frameworkFrameNumber;
4253 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004254 int32_t rc = NO_ERROR;
4255
4256 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004257 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004258
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004259 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004260 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4261 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4262 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004263 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004264 LOGE("Cannot find translated frameworkFrameNumber");
4265 assert(0);
4266 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004267 }
4268 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004269
4270 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4271 LOGD("Internal Request drop the notifyCb");
4272 } else {
4273 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4274 mCallbackOps->notify(mCallbackOps, notify_msg);
4275 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004276}
4277
4278/*===========================================================================
4279 * FUNCTION : FrameNumberRegistry
4280 *
4281 * DESCRIPTION: Constructor
4282 *
4283 * PARAMETERS :
4284 *
4285 * RETURN :
4286 *
4287 *==========================================================================*/
4288FrameNumberRegistry::FrameNumberRegistry()
4289{
4290 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4291}
4292
4293/*===========================================================================
4294 * FUNCTION : ~FrameNumberRegistry
4295 *
4296 * DESCRIPTION: Destructor
4297 *
4298 * PARAMETERS :
4299 *
4300 * RETURN :
4301 *
4302 *==========================================================================*/
4303FrameNumberRegistry::~FrameNumberRegistry()
4304{
4305}
4306
4307/*===========================================================================
4308 * FUNCTION : PurgeOldEntriesLocked
4309 *
4310 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4311 *
4312 * PARAMETERS :
4313 *
4314 * RETURN : NONE
4315 *
4316 *==========================================================================*/
4317void FrameNumberRegistry::purgeOldEntriesLocked()
4318{
4319 while (_register.begin() != _register.end()) {
4320 auto itr = _register.begin();
4321 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4322 _register.erase(itr);
4323 } else {
4324 return;
4325 }
4326 }
4327}
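/* Illustrative example (hypothetical numbers): with FRAME_REGISTER_LRU_SIZE of,
 * say, 300 and _nextFreeInternalNumber at 1000, any map entry whose internal
 * frame number is below 700 is erased on the next alloc/generate/get call, so
 * the registry stays bounded even though internal numbers grow monotonically.
 */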
4328
4329/*===========================================================================
4330 * FUNCTION : allocStoreInternalFrameNumber
4331 *
4332 * DESCRIPTION: Method to note down a framework request and associate a new
4333 * internal request number with it
4334 *
4335 * PARAMETERS :
4336 * @frameworkFrameNumber: Identifier given by the framework
4337 * @internalFrameNumber : Output parameter which will hold the newly generated
4338 * internal frame number
4339 *
4340 * RETURN : Error code
4341 *
4342 *==========================================================================*/
4343int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4344 uint32_t &internalFrameNumber)
4345{
4346 Mutex::Autolock lock(mRegistryLock);
4347 internalFrameNumber = _nextFreeInternalNumber++;
4348 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4349 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4350 purgeOldEntriesLocked();
4351 return NO_ERROR;
4352}
4353
4354/*===========================================================================
4355 * FUNCTION : generateStoreInternalFrameNumber
4356 *
4357 * DESCRIPTION: Method to associate a new internal request number independent
4358 * of any association with framework requests
4359 *
4360 * PARAMETERS :
4361 * @internalFrame#: Output parameter which will hold the newly generated internal
4362 * frame number
4363 *
4364 * RETURN : Error code
4365 *
4366 *==========================================================================*/
4367int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4368{
4369 Mutex::Autolock lock(mRegistryLock);
4370 internalFrameNumber = _nextFreeInternalNumber++;
4371 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4372 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4373 purgeOldEntriesLocked();
4374 return NO_ERROR;
4375}
4376
4377/*===========================================================================
4378 * FUNCTION : getFrameworkFrameNumber
4379 *
4380 * DESCRIPTION: Method to query the framework frame number given an internal one
4381 *
4382 * PARAMETERS :
4383 * @internalFrame#: Internal reference
4384 * @frameworkframenumber: Output parameter holding framework frame entry
4385 *
4386 * RETURN : Error code
4387 *
4388 *==========================================================================*/
4389int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4390 uint32_t &frameworkFrameNumber)
4391{
4392 Mutex::Autolock lock(mRegistryLock);
4393 auto itr = _register.find(internalFrameNumber);
4394 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004395 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004396 return -ENOENT;
4397 }
4398
4399 frameworkFrameNumber = itr->second;
4400 purgeOldEntriesLocked();
4401 return NO_ERROR;
4402}
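/* Illustrative sketch (not part of the build) of how the registry ties the two
 * numbering spaces together:
 *
 *   uint32_t internalFN, fwkFN;
 *   registry.allocStoreInternalFrameNumber(42, internalFN);  // framework request 42
 *   registry.generateStoreInternalFrameNumber(internalFN);   // internal-only request
 *   if (registry.getFrameworkFrameNumber(internalFN, fwkFN) == NO_ERROR &&
 *           fwkFN == EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // internal-only entry: the corresponding result/notify is dropped
 *   }
 */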
Thierry Strudel3d639192016-09-09 11:52:26 -07004403
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004404status_t QCamera3HardwareInterface::fillPbStreamConfig(
4405 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4406 QCamera3Channel *channel, uint32_t streamIndex) {
4407 if (config == nullptr) {
4408 LOGE("%s: config is null", __FUNCTION__);
4409 return BAD_VALUE;
4410 }
4411
4412 if (channel == nullptr) {
4413 LOGE("%s: channel is null", __FUNCTION__);
4414 return BAD_VALUE;
4415 }
4416
4417 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4418 if (stream == nullptr) {
4419 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4420 return NAME_NOT_FOUND;
4421 }
4422
4423 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4424 if (streamInfo == nullptr) {
4425 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4426 return NAME_NOT_FOUND;
4427 }
4428
4429 config->id = pbStreamId;
4430 config->image.width = streamInfo->dim.width;
4431 config->image.height = streamInfo->dim.height;
4432 config->image.padding = 0;
4433 config->image.format = pbStreamFormat;
4434
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004435 uint32_t totalPlaneSize = 0;
4436
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004437 // Fill plane information.
4438 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4439 pbcamera::PlaneConfiguration plane;
4440 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4441 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4442 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004443
4444 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004445 }
4446
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004447 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004448 return OK;
4449}
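/* Worked example (hypothetical numbers) of the padding computed above: for a
 * two-plane YUV stream with stride 4032 / scanline 3024 on the Y plane and
 * stride 4032 / scanline 1512 on the UV plane, the planes add up to
 * 4032*3024 + 4032*1512 = 18,289,152 bytes; if the backend reports a frame_len
 * of 18,874,368 (an aligned allocation), config->image.padding becomes
 * 585,216 bytes of trailing padding after the plane data.
 */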
4450
Thierry Strudel3d639192016-09-09 11:52:26 -07004451/*===========================================================================
4452 * FUNCTION : processCaptureRequest
4453 *
4454 * DESCRIPTION: process a capture request from camera service
4455 *
4456 * PARAMETERS :
4457 * @request : request from framework to process
4458 *
4459 * RETURN :
4460 *
4461 *==========================================================================*/
4462int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004463 camera3_capture_request_t *request,
4464 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004465{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004466 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004467 int rc = NO_ERROR;
4468 int32_t request_id;
4469 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004470 bool isVidBufRequested = false;
4471 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004472 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004473
4474 pthread_mutex_lock(&mMutex);
4475
4476 // Validate current state
4477 switch (mState) {
4478 case CONFIGURED:
4479 case STARTED:
4480 /* valid state */
4481 break;
4482
4483 case ERROR:
4484 pthread_mutex_unlock(&mMutex);
4485 handleCameraDeviceError();
4486 return -ENODEV;
4487
4488 default:
4489 LOGE("Invalid state %d", mState);
4490 pthread_mutex_unlock(&mMutex);
4491 return -ENODEV;
4492 }
4493
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004494 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004495 if (rc != NO_ERROR) {
4496 LOGE("incoming request is not valid");
4497 pthread_mutex_unlock(&mMutex);
4498 return rc;
4499 }
4500
4501 meta = request->settings;
4502
4503 // For first capture request, send capture intent, and
4504 // stream on all streams
4505 if (mState == CONFIGURED) {
4506 // send an unconfigure to the backend so that the isp
4507 // resources are deallocated
4508 if (!mFirstConfiguration) {
4509 cam_stream_size_info_t stream_config_info;
4510 int32_t hal_version = CAM_HAL_V3;
4511 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4512 stream_config_info.buffer_info.min_buffers =
4513 MIN_INFLIGHT_REQUESTS;
4514 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004515 m_bIs4KVideo ? 0 :
4516 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004517 clear_metadata_buffer(mParameters);
4518 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4519 CAM_INTF_PARM_HAL_VERSION, hal_version);
4520 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4521 CAM_INTF_META_STREAM_INFO, stream_config_info);
4522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4523 mParameters);
4524 if (rc < 0) {
4525 LOGE("set_parms for unconfigure failed");
4526 pthread_mutex_unlock(&mMutex);
4527 return rc;
4528 }
4529 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004530 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004531 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004532 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004533 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004534 property_get("persist.camera.is_type", is_type_value, "4");
4535 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4536 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4537 property_get("persist.camera.is_type_preview", is_type_value, "4");
4538 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4539 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004540
4541 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4542 int32_t hal_version = CAM_HAL_V3;
4543 uint8_t captureIntent =
4544 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4545 mCaptureIntent = captureIntent;
4546 clear_metadata_buffer(mParameters);
4547 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4548 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4549 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004550 if (mFirstConfiguration) {
4551 // configure instant AEC
4552 // Instant AEC is a session based parameter and it is needed only
4553 // once per complete session after open camera.
4554 // i.e. This is set only once for the first capture request, after open camera.
4555 setInstantAEC(meta);
4556 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004557 uint8_t fwkVideoStabMode=0;
4558 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4559 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4560 }
4561
4562 // Turn on EIS for video/preview only if the EIS setprop is enabled, the first
4563 // capture request asks for video stabilization, and the size/IS type support it
4564 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4565 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004566 int32_t vsMode;
4567 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4569 rc = BAD_VALUE;
4570 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004571 LOGD("setEis %d", setEis);
4572 bool eis3Supported = false;
4573 size_t count = IS_TYPE_MAX;
4574 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4575 for (size_t i = 0; i < count; i++) {
4576 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4577 eis3Supported = true;
4578 break;
4579 }
4580 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004581
4582 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004583 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004584 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4585 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004586 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4587 is_type = isTypePreview;
4588 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4589 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4590 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004591 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004592 } else {
4593 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004594 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004595 } else {
4596 is_type = IS_TYPE_NONE;
4597 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004598 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004599 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004600 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4601 }
4602 }
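        // Net effect of the loop above: with EIS requested, preview streams get
        // isTypePreview, video streams get isTypeVideo (downgraded to IS_TYPE_EIS_2_0
        // when EIS 3.0 is not supported), and every other stream gets IS_TYPE_NONE.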
4603
4604 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4605 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4606
Thierry Strudel54dc9782017-02-15 12:12:10 -08004607 //Disable tintless only if the property is set to 0
4608 memset(prop, 0, sizeof(prop));
4609 property_get("persist.camera.tintless.enable", prop, "1");
4610 int32_t tintless_value = atoi(prop);
4611
Thierry Strudel3d639192016-09-09 11:52:26 -07004612 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4613 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004614
Thierry Strudel3d639192016-09-09 11:52:26 -07004615 //Disable CDS for HFR mode or if DIS/EIS is on.
4616 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4617 //after every configure_stream
4618 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4619 (m_bIsVideo)) {
4620 int32_t cds = CAM_CDS_MODE_OFF;
4621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4622 CAM_INTF_PARM_CDS_MODE, cds))
4623 LOGE("Failed to disable CDS for HFR mode");
4624
4625 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004626
4627 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4628 uint8_t* use_av_timer = NULL;
4629
4630 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004631 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004632 use_av_timer = &m_debug_avtimer;
4633 }
4634 else{
4635 use_av_timer =
4636 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004637 if (use_av_timer) {
4638 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4639 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004640 }
4641
4642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4643 rc = BAD_VALUE;
4644 }
4645 }
4646
Thierry Strudel3d639192016-09-09 11:52:26 -07004647 setMobicat();
4648
4649 /* Set fps and hfr mode while sending meta stream info so that sensor
4650 * can configure appropriate streaming mode */
4651 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004652 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4653 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004654 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4655 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004656 if (rc == NO_ERROR) {
4657 int32_t max_fps =
4658 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004659 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004660 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4661 }
4662 /* For HFR, more buffers are dequeued upfront to improve the performance */
4663 if (mBatchSize) {
4664 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4665 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4666 }
4667 }
4668 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004669 LOGE("setHalFpsRange failed");
4670 }
4671 }
4672 if (meta.exists(ANDROID_CONTROL_MODE)) {
4673 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4674 rc = extractSceneMode(meta, metaMode, mParameters);
4675 if (rc != NO_ERROR) {
4676 LOGE("extractSceneMode failed");
4677 }
4678 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004679 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004680
Thierry Strudel04e026f2016-10-10 11:27:36 -07004681 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4682 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4683 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4684 rc = setVideoHdrMode(mParameters, vhdr);
4685 if (rc != NO_ERROR) {
4686 LOGE("setVideoHDR is failed");
4687 }
4688 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004689
Thierry Strudel3d639192016-09-09 11:52:26 -07004690 //TODO: validate the arguments, HSV scenemode should have only the
4691 //advertised fps ranges
4692
4693 /*set the capture intent, hal version, tintless, stream info,
4694 *and DIS enable parameters to the backend*/
4695 LOGD("set_parms META_STREAM_INFO " );
4696 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004697 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4698 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004699 mStreamConfigInfo.type[i],
4700 mStreamConfigInfo.stream_sizes[i].width,
4701 mStreamConfigInfo.stream_sizes[i].height,
4702 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004703 mStreamConfigInfo.format[i],
4704 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004705 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004706
Thierry Strudel3d639192016-09-09 11:52:26 -07004707 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4708 mParameters);
4709 if (rc < 0) {
4710 LOGE("set_parms failed for hal version, stream info");
4711 }
4712
Chien-Yu Chenee335912017-02-09 17:53:20 -08004713 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4714 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004715 if (rc != NO_ERROR) {
4716 LOGE("Failed to get sensor output size");
4717 pthread_mutex_unlock(&mMutex);
4718 goto error_exit;
4719 }
4720
4721 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4722 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004723 mSensorModeInfo.active_array_size.width,
4724 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004725
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004726 if (gHdrPlusClient != nullptr) {
4727 rc = gHdrPlusClient->setEaselBypassMipiRate(mCameraId, mSensorModeInfo.op_pixel_clk);
4728 if (rc != OK) {
4729 ALOGE("%s: Failed to set Easel bypass MIPI rate for camera %u to %u", __FUNCTION__,
4730 mCameraId, mSensorModeInfo.op_pixel_clk);
4731 pthread_mutex_unlock(&mMutex);
4732 goto error_exit;
4733 }
4734 }
4735
Thierry Strudel3d639192016-09-09 11:52:26 -07004736 /* Set batchmode before initializing channel. Since registerBuffer
4737 * internally initializes some of the channels, better set batchmode
4738 * even before first register buffer */
4739 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4740 it != mStreamInfo.end(); it++) {
4741 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4742 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4743 && mBatchSize) {
4744 rc = channel->setBatchSize(mBatchSize);
4745 //Disable per frame map unmap for HFR/batchmode case
4746 rc |= channel->setPerFrameMapUnmap(false);
4747 if (NO_ERROR != rc) {
4748 LOGE("Channel init failed %d", rc);
4749 pthread_mutex_unlock(&mMutex);
4750 goto error_exit;
4751 }
4752 }
4753 }
4754
4755 //First initialize all streams
4756 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4757 it != mStreamInfo.end(); it++) {
4758 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4759 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4760 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004761 setEis) {
4762 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4763 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4764 is_type = mStreamConfigInfo.is_type[i];
4765 break;
4766 }
4767 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004768 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004769 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004770 rc = channel->initialize(IS_TYPE_NONE);
4771 }
4772 if (NO_ERROR != rc) {
4773 LOGE("Channel initialization failed %d", rc);
4774 pthread_mutex_unlock(&mMutex);
4775 goto error_exit;
4776 }
4777 }
4778
4779 if (mRawDumpChannel) {
4780 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4781 if (rc != NO_ERROR) {
4782 LOGE("Error: Raw Dump Channel init failed");
4783 pthread_mutex_unlock(&mMutex);
4784 goto error_exit;
4785 }
4786 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (mHdrPlusRawSrcChannel) {
4788 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4789 if (rc != NO_ERROR) {
4790 LOGE("Error: HDR+ RAW Source Channel init failed");
4791 pthread_mutex_unlock(&mMutex);
4792 goto error_exit;
4793 }
4794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 if (mSupportChannel) {
4796 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4797 if (rc < 0) {
4798 LOGE("Support channel initialization failed");
4799 pthread_mutex_unlock(&mMutex);
4800 goto error_exit;
4801 }
4802 }
4803 if (mAnalysisChannel) {
4804 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4805 if (rc < 0) {
4806 LOGE("Analysis channel initialization failed");
4807 pthread_mutex_unlock(&mMutex);
4808 goto error_exit;
4809 }
4810 }
4811 if (mDummyBatchChannel) {
4812 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4813 if (rc < 0) {
4814 LOGE("mDummyBatchChannel setBatchSize failed");
4815 pthread_mutex_unlock(&mMutex);
4816 goto error_exit;
4817 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004818 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004819 if (rc < 0) {
4820 LOGE("mDummyBatchChannel initialization failed");
4821 pthread_mutex_unlock(&mMutex);
4822 goto error_exit;
4823 }
4824 }
4825
4826 // Set bundle info
4827 rc = setBundleInfo();
4828 if (rc < 0) {
4829 LOGE("setBundleInfo failed %d", rc);
4830 pthread_mutex_unlock(&mMutex);
4831 goto error_exit;
4832 }
4833
4834 //update settings from app here
4835 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4836 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4837 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4838 }
4839 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4840 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4841 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4842 }
4843 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4844 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4845 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4846
4847 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4848 (mLinkedCameraId != mCameraId) ) {
4849 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4850 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004852 goto error_exit;
4853 }
4854 }
4855
4856 // add bundle related cameras
4857 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4858 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004859 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4860 &m_pDualCamCmdPtr->bundle_info;
4861 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004862 if (mIsDeviceLinked)
4863 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4864 else
4865 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4866
4867 pthread_mutex_lock(&gCamLock);
4868
4869 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4870 LOGE("Dualcam: Invalid Session Id ");
4871 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004872 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004873 goto error_exit;
4874 }
4875
4876 if (mIsMainCamera == 1) {
4877 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4878 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004879 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004880 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 // related session id should be session id of linked session
4882 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4883 } else {
4884 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4885 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004886 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004887 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4889 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004890 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 pthread_mutex_unlock(&gCamLock);
4892
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004893 rc = mCameraHandle->ops->set_dual_cam_cmd(
4894 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 if (rc < 0) {
4896 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 goto error_exit;
4899 }
4900 }
4901
4902 //Then start them.
4903 LOGH("Start META Channel");
4904 rc = mMetadataChannel->start();
4905 if (rc < 0) {
4906 LOGE("META channel start failed");
4907 pthread_mutex_unlock(&mMutex);
4908 goto error_exit;
4909 }
4910
4911 if (mAnalysisChannel) {
4912 rc = mAnalysisChannel->start();
4913 if (rc < 0) {
4914 LOGE("Analysis channel start failed");
4915 mMetadataChannel->stop();
4916 pthread_mutex_unlock(&mMutex);
4917 goto error_exit;
4918 }
4919 }
4920
4921 if (mSupportChannel) {
4922 rc = mSupportChannel->start();
4923 if (rc < 0) {
4924 LOGE("Support channel start failed");
4925 mMetadataChannel->stop();
4926 /* Although support and analysis are mutually exclusive today
4927 adding it in any case for future-proofing */
4928 if (mAnalysisChannel) {
4929 mAnalysisChannel->stop();
4930 }
4931 pthread_mutex_unlock(&mMutex);
4932 goto error_exit;
4933 }
4934 }
4935 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4936 it != mStreamInfo.end(); it++) {
4937 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4938 LOGH("Start Processing Channel mask=%d",
4939 channel->getStreamTypeMask());
4940 rc = channel->start();
4941 if (rc < 0) {
4942 LOGE("channel start failed");
4943 pthread_mutex_unlock(&mMutex);
4944 goto error_exit;
4945 }
4946 }
4947
4948 if (mRawDumpChannel) {
4949 LOGD("Starting raw dump stream");
4950 rc = mRawDumpChannel->start();
4951 if (rc != NO_ERROR) {
4952 LOGE("Error Starting Raw Dump Channel");
4953 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4954 it != mStreamInfo.end(); it++) {
4955 QCamera3Channel *channel =
4956 (QCamera3Channel *)(*it)->stream->priv;
4957 LOGH("Stopping Processing Channel mask=%d",
4958 channel->getStreamTypeMask());
4959 channel->stop();
4960 }
4961 if (mSupportChannel)
4962 mSupportChannel->stop();
4963 if (mAnalysisChannel) {
4964 mAnalysisChannel->stop();
4965 }
4966 mMetadataChannel->stop();
4967 pthread_mutex_unlock(&mMutex);
4968 goto error_exit;
4969 }
4970 }
4971
4972 if (mChannelHandle) {
4973
4974 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4975 mChannelHandle);
4976 if (rc != NO_ERROR) {
4977 LOGE("start_channel failed %d", rc);
4978 pthread_mutex_unlock(&mMutex);
4979 goto error_exit;
4980 }
4981 }
4982
4983 goto no_error;
4984error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004985 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004986 return rc;
4987no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 mWokenUpByDaemon = false;
4989 mPendingLiveRequest = 0;
4990 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
4992
Chien-Yu Chenee335912017-02-09 17:53:20 -08004993 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004994 if (gHdrPlusClient != nullptr && !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
Chien-Yu Chenee335912017-02-09 17:53:20 -08004995 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
4996 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
4997 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
4998 rc = enableHdrPlusModeLocked();
4999 if (rc != OK) {
5000 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
5001 pthread_mutex_unlock(&mMutex);
5002 return rc;
5003 }
5004
5005 // Start HDR+ RAW source channel if AP provides RAW input buffers.
5006 if (mHdrPlusRawSrcChannel) {
5007 rc = mHdrPlusRawSrcChannel->start();
5008 if (rc != OK) {
5009 LOGE("Error Starting HDR+ RAW Channel");
5010 pthread_mutex_unlock(&mMutex);
5011 return rc;
5012 }
5013 }
5014 mFirstPreviewIntentSeen = true;
5015 }
5016
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005018 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005019
5020 if (mFlushPerf) {
5021 //we cannot accept any requests during flush
5022 LOGE("process_capture_request cannot proceed during flush");
5023 pthread_mutex_unlock(&mMutex);
5024 return NO_ERROR; //should return an error
5025 }
5026
5027 if (meta.exists(ANDROID_REQUEST_ID)) {
5028 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5029 mCurrentRequestId = request_id;
5030 LOGD("Received request with id: %d", request_id);
5031 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5032 LOGE("Unable to find request id field, \
5033 & no previous id available");
5034 pthread_mutex_unlock(&mMutex);
5035 return NAME_NOT_FOUND;
5036 } else {
5037 LOGD("Re-using old request id");
5038 request_id = mCurrentRequestId;
5039 }
5040
5041 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5042 request->num_output_buffers,
5043 request->input_buffer,
5044 frameNumber);
5045 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005046 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005048 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 uint32_t snapshotStreamId = 0;
5050 for (size_t i = 0; i < request->num_output_buffers; i++) {
5051 const camera3_stream_buffer_t& output = request->output_buffers[i];
5052 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5053
Emilian Peev7650c122017-01-19 08:24:33 -08005054 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5055 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005056 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005057 blob_request = 1;
5058 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5059 }
5060
5061 if (output.acquire_fence != -1) {
5062 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5063 close(output.acquire_fence);
5064 if (rc != OK) {
5065 LOGE("sync wait failed %d", rc);
5066 pthread_mutex_unlock(&mMutex);
5067 return rc;
5068 }
5069 }
5070
Emilian Peev7650c122017-01-19 08:24:33 -08005071 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5072 depthRequestPresent = true;
5073 continue;
5074 }
5075
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005076 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005078
5079 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5080 isVidBufRequested = true;
5081 }
5082 }
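    // At this point streamsArray holds the stream IDs for every non-depth output
    // buffer in this request (depth buffers are handled separately via mDepthChannel),
    // blob_request marks whether a JPEG capture was asked for, and all acquire
    // fences have been waited on and closed.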
5083
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005084 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5085 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5086 itr++) {
5087 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5088 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5089 channel->getStreamID(channel->getStreamTypeMask());
5090
5091 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5092 isVidBufRequested = true;
5093 }
5094 }
5095
Thierry Strudel3d639192016-09-09 11:52:26 -07005096 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005097 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005098 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 }
5100 if (blob_request && mRawDumpChannel) {
5101 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005102 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005104 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 }
5106
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005107 {
5108 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5109 // Request a RAW buffer if
5110 // 1. mHdrPlusRawSrcChannel is valid.
5111 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5112 // 3. There is no pending HDR+ request.
5113 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5114 mHdrPlusPendingRequests.size() == 0) {
5115 streamsArray.stream_request[streamsArray.num_streams].streamID =
5116 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5117 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5118 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005119 }
5120
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005121 //extract capture intent
5122 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5123 mCaptureIntent =
5124 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5125 }
5126
5127 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5128 mCacMode =
5129 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5130 }
5131
5132 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005133 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005134
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005135 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005136 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005137 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5138 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005139 }
5140
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005141 if (hdrPlusRequest) {
5142 // For a HDR+ request, just set the frame parameters.
5143 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5144 if (rc < 0) {
5145 LOGE("fail to set frame parameters");
5146 pthread_mutex_unlock(&mMutex);
5147 return rc;
5148 }
5149 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 /* Parse the settings:
5151 * - For every request in NORMAL MODE
5152 * - For every request in HFR mode during preview only case
5153 * - For first request of every batch in HFR mode during video
5154 * recording. In batchmode the same settings except frame number is
5155 * repeated in each request of the batch.
5156 */
5157 if (!mBatchSize ||
5158 (mBatchSize && !isVidBufRequested) ||
5159 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005160 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005161 if (rc < 0) {
5162 LOGE("fail to set frame parameters");
5163 pthread_mutex_unlock(&mMutex);
5164 return rc;
5165 }
5166 }
5167 /* For batchMode HFR, setFrameParameters is not called for every
5168 * request. But only frame number of the latest request is parsed.
5169 * Keep track of first and last frame numbers in a batch so that
5170 * metadata for the frame numbers of batch can be duplicated in
5171 * handleBatchMetadata */
5172 if (mBatchSize) {
5173 if (!mToBeQueuedVidBufs) {
5174 //start of the batch
5175 mFirstFrameNumberInBatch = request->frame_number;
5176 }
5177 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5178 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5179 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005180 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 return BAD_VALUE;
5182 }
5183 }
5184 if (mNeedSensorRestart) {
5185 /* Unlock the mutex as restartSensor waits on the channels to be
5186 * stopped, which in turn calls stream callback functions -
5187 * handleBufferWithLock and handleMetadataWithLock */
5188 pthread_mutex_unlock(&mMutex);
5189 rc = dynamicUpdateMetaStreamInfo();
5190 if (rc != NO_ERROR) {
5191 LOGE("Restarting the sensor failed");
5192 return BAD_VALUE;
5193 }
5194 mNeedSensorRestart = false;
5195 pthread_mutex_lock(&mMutex);
5196 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005197 if(mResetInstantAEC) {
5198 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5199 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5200 mResetInstantAEC = false;
5201 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005202 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 if (request->input_buffer->acquire_fence != -1) {
5204 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5205 close(request->input_buffer->acquire_fence);
5206 if (rc != OK) {
5207 LOGE("input buffer sync wait failed %d", rc);
5208 pthread_mutex_unlock(&mMutex);
5209 return rc;
5210 }
5211 }
5212 }
5213
5214 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5215 mLastCustIntentFrmNum = frameNumber;
5216 }
5217 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005218 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005219 pendingRequestIterator latestRequest;
5220 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005221 pendingRequest.num_buffers = depthRequestPresent ?
5222 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 pendingRequest.request_id = request_id;
5224 pendingRequest.blob_request = blob_request;
5225 pendingRequest.timestamp = 0;
5226 pendingRequest.bUrgentReceived = 0;
5227 if (request->input_buffer) {
5228 pendingRequest.input_buffer =
5229 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5230 *(pendingRequest.input_buffer) = *(request->input_buffer);
5231 pInputBuffer = pendingRequest.input_buffer;
5232 } else {
5233 pendingRequest.input_buffer = NULL;
5234 pInputBuffer = NULL;
5235 }
5236
5237 pendingRequest.pipeline_depth = 0;
5238 pendingRequest.partial_result_cnt = 0;
5239 extractJpegMetadata(mCurJpegMeta, request);
5240 pendingRequest.jpegMetadata = mCurJpegMeta;
5241 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5242 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005244 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5245 mHybridAeEnable =
5246 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5247 }
5248 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005249 /* DevCamDebug metadata processCaptureRequest */
5250 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5251 mDevCamDebugMetaEnable =
5252 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5253 }
5254 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5255 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005256
5257 //extract CAC info
5258 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5259 mCacMode =
5260 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5261 }
5262 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005263 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264
5265 PendingBuffersInRequest bufsForCurRequest;
5266 bufsForCurRequest.frame_number = frameNumber;
5267 // Mark current timestamp for the new request
5268 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005269 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005270
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005271 if (hdrPlusRequest) {
5272 // Save settings for this request.
5273 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5274 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5275
5276 // Add to pending HDR+ request queue.
5277 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5278 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5279
5280 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5281 }
5282
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005284 if (request->output_buffers[i].stream->data_space ==
5285 HAL_DATASPACE_DEPTH) {
5286 continue;
5287 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005288 RequestedBufferInfo requestedBuf;
5289 memset(&requestedBuf, 0, sizeof(requestedBuf));
5290 requestedBuf.stream = request->output_buffers[i].stream;
5291 requestedBuf.buffer = NULL;
5292 pendingRequest.buffers.push_back(requestedBuf);
5293
5294 // Add to buffer handle the pending buffers list
5295 PendingBufferInfo bufferInfo;
5296 bufferInfo.buffer = request->output_buffers[i].buffer;
5297 bufferInfo.stream = request->output_buffers[i].stream;
5298 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5299 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5300 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5301 frameNumber, bufferInfo.buffer,
5302 channel->getStreamTypeMask(), bufferInfo.stream->format);
5303 }
5304 // Add this request packet into mPendingBuffersMap
5305 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5306 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5307 mPendingBuffersMap.get_num_overall_buffers());
5308
5309 latestRequest = mPendingRequestsList.insert(
5310 mPendingRequestsList.end(), pendingRequest);
5311 if(mFlush) {
5312 LOGI("mFlush is true");
5313 pthread_mutex_unlock(&mMutex);
5314 return NO_ERROR;
5315 }
5316
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005317 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5318 // channel.
5319 if (!hdrPlusRequest) {
5320 int indexUsed;
5321 // Notify metadata channel we receive a request
5322 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005323
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005324 if(request->input_buffer != NULL){
5325 LOGD("Input request, frame_number %d", frameNumber);
5326 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5327 if (NO_ERROR != rc) {
5328 LOGE("fail to set reproc parameters");
5329 pthread_mutex_unlock(&mMutex);
5330 return rc;
5331 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005332 }
5333
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005334 // Call request on other streams
5335 uint32_t streams_need_metadata = 0;
5336 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5337 for (size_t i = 0; i < request->num_output_buffers; i++) {
5338 const camera3_stream_buffer_t& output = request->output_buffers[i];
5339 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5340
5341 if (channel == NULL) {
5342 LOGW("invalid channel pointer for stream");
5343 continue;
5344 }
5345
5346 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5347 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5348 output.buffer, request->input_buffer, frameNumber);
5349 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005351 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5352 if (rc < 0) {
5353 LOGE("Fail to request on picture channel");
5354 pthread_mutex_unlock(&mMutex);
5355 return rc;
5356 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005358 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5359 assert(NULL != mDepthChannel);
5360 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005361
Emilian Peev7650c122017-01-19 08:24:33 -08005362 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5363 if (rc < 0) {
5364 LOGE("Fail to map on depth buffer");
5365 pthread_mutex_unlock(&mMutex);
5366 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005367 }
Emilian Peev7650c122017-01-19 08:24:33 -08005368 } else {
5369 LOGD("snapshot request with buffer %p, frame_number %d",
5370 output.buffer, frameNumber);
5371 if (!request->settings) {
5372 rc = channel->request(output.buffer, frameNumber,
5373 NULL, mPrevParameters, indexUsed);
5374 } else {
5375 rc = channel->request(output.buffer, frameNumber,
5376 NULL, mParameters, indexUsed);
5377 }
5378 if (rc < 0) {
5379 LOGE("Fail to request on picture channel");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005383
Emilian Peev7650c122017-01-19 08:24:33 -08005384 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5385 uint32_t j = 0;
5386 for (j = 0; j < streamsArray.num_streams; j++) {
5387 if (streamsArray.stream_request[j].streamID == streamId) {
5388 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5389 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5390 else
5391 streamsArray.stream_request[j].buf_index = indexUsed;
5392 break;
5393 }
5394 }
5395 if (j == streamsArray.num_streams) {
5396 LOGE("Did not find matching stream to update index");
5397 assert(0);
5398 }
5399
5400 pendingBufferIter->need_metadata = true;
5401 streams_need_metadata++;
5402 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005404 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
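      // YUV output: the channel reports through needMetadata whether this buffer
      // must keep the HAL metadata buffer (e.g. for later reprocessing).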
5405 bool needMetadata = false;
5406 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5407 rc = yuvChannel->request(output.buffer, frameNumber,
5408 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5409 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005410 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005411 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 pthread_mutex_unlock(&mMutex);
5413 return rc;
5414 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005415
5416 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5417 uint32_t j = 0;
5418 for (j = 0; j < streamsArray.num_streams; j++) {
5419 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005420 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5421 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5422 else
5423 streamsArray.stream_request[j].buf_index = indexUsed;
5424 break;
5425 }
5426 }
5427 if (j == streamsArray.num_streams) {
5428 LOGE("Did not find matching stream to update index");
5429 assert(0);
5430 }
5431
5432 pendingBufferIter->need_metadata = needMetadata;
5433 if (needMetadata)
5434 streams_need_metadata += 1;
5435 LOGD("calling YUV channel request, need_metadata is %d",
5436 needMetadata);
5437 } else {
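      // All remaining stream types (e.g. preview/video/callback): issue a plain
      // buffer request; when HFR batching is enabled, video buffers are queued to
      // the backend only once a full batch has been collected.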
5438 LOGD("request with buffer %p, frame_number %d",
5439 output.buffer, frameNumber);
5440
5441 rc = channel->request(output.buffer, frameNumber, indexUsed);
5442
5443 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5444 uint32_t j = 0;
5445 for (j = 0; j < streamsArray.num_streams; j++) {
5446 if (streamsArray.stream_request[j].streamID == streamId) {
5447 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5448 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5449 else
5450 streamsArray.stream_request[j].buf_index = indexUsed;
5451 break;
5452 }
5453 }
5454 if (j == streamsArray.num_streams) {
5455 LOGE("Did not find matching stream to update index");
5456 assert(0);
5457 }
5458
5459 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5460 && mBatchSize) {
5461 mToBeQueuedVidBufs++;
5462 if (mToBeQueuedVidBufs == mBatchSize) {
5463 channel->queueBatchBuf();
5464 }
5465 }
5466 if (rc < 0) {
5467 LOGE("request failed");
5468 pthread_mutex_unlock(&mMutex);
5469 return rc;
5470 }
5471 }
5472 pendingBufferIter++;
5473 }
5474
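      // Issue requests for streams the HAL requested internally (no framework
      // output buffer is attached); metering-only internal requests do not need
      // the HAL metadata buffer.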
5475 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5476 itr++) {
5477 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5478
5479 if (channel == NULL) {
5480 LOGE("invalid channel pointer for stream");
5481 assert(0);
5482 return BAD_VALUE;
5483 }
5484
5485 InternalRequest requestedStream;
5486 requestedStream = (*itr);
5487
5488
5489 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5490 LOGD("snapshot request internally input buffer %p, frame_number %d",
5491 request->input_buffer, frameNumber);
5492 if(request->input_buffer != NULL){
5493 rc = channel->request(NULL, frameNumber,
5494 pInputBuffer, &mReprocMeta, indexUsed, true,
5495 requestedStream.meteringOnly);
5496 if (rc < 0) {
5497 LOGE("Fail to request on picture channel");
5498 pthread_mutex_unlock(&mMutex);
5499 return rc;
5500 }
5501 } else {
5502 LOGD("snapshot request with frame_number %d", frameNumber);
5503 if (!request->settings) {
5504 rc = channel->request(NULL, frameNumber,
5505 NULL, mPrevParameters, indexUsed, true,
5506 requestedStream.meteringOnly);
5507 } else {
5508 rc = channel->request(NULL, frameNumber,
5509 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5510 }
5511 if (rc < 0) {
5512 LOGE("Fail to request on picture channel");
5513 pthread_mutex_unlock(&mMutex);
5514 return rc;
5515 }
5516
5517 if ((*itr).meteringOnly != 1) {
5518 requestedStream.need_metadata = 1;
5519 streams_need_metadata++;
5520 }
5521 }
5522
5523 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5524 uint32_t j = 0;
5525 for (j = 0; j < streamsArray.num_streams; j++) {
5526 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005527 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5528 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5529 else
5530 streamsArray.stream_request[j].buf_index = indexUsed;
5531 break;
5532 }
5533 }
5534 if (j == streamsArray.num_streams) {
5535 LOGE("Did not find matching stream to update index");
5536 assert(0);
5537 }
5538
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005539 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005541 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005542 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005543 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005544 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005545 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005546
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005547 //If 2 streams have need_metadata set to true, fail the request, unless
5548 //we copy/reference count the metadata buffer
5549 if (streams_need_metadata > 1) {
5550 LOGE("not supporting request in which two streams requires"
5551 " 2 HAL metadata for reprocessing");
5552 pthread_mutex_unlock(&mMutex);
5553 return -EINVAL;
5554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005555
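      // Enable PDAF data in the backend parameters only when this request
      // contains a depth stream.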
Emilian Peev7650c122017-01-19 08:24:33 -08005556 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5558 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5559 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5560 pthread_mutex_unlock(&mMutex);
5561 return BAD_VALUE;
5562 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005563 if (request->input_buffer == NULL) {
5564 /* Set the parameters to backend:
5565 * - For every request in NORMAL MODE
5566 * - For every request in HFR mode during preview only case
5567 * - Once every batch in HFR mode during video recording
5568 */
5569 if (!mBatchSize ||
5570 (mBatchSize && !isVidBufRequested) ||
5571 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5572 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5573 mBatchSize, isVidBufRequested,
5574 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005575
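      // HFR recording: merge this frame's stream list into the accumulated batch
      // stream array (skipping duplicate stream IDs) so the whole batch is sent
      // to the backend in a single set_parms call.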
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005576 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5577 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5578 uint32_t m = 0;
5579 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5580 if (streamsArray.stream_request[k].streamID ==
5581 mBatchedStreamsArray.stream_request[m].streamID)
5582 break;
5583 }
5584 if (m == mBatchedStreamsArray.num_streams) {
5585 mBatchedStreamsArray.stream_request\
5586 [mBatchedStreamsArray.num_streams].streamID =
5587 streamsArray.stream_request[k].streamID;
5588 mBatchedStreamsArray.stream_request\
5589 [mBatchedStreamsArray.num_streams].buf_index =
5590 streamsArray.stream_request[k].buf_index;
5591 mBatchedStreamsArray.num_streams =
5592 mBatchedStreamsArray.num_streams + 1;
5593 }
5594 }
5595 streamsArray = mBatchedStreamsArray;
5596 }
5597 /* Update stream id of all the requested buffers */
5598 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5599 streamsArray)) {
5600 LOGE("Failed to set stream type mask in the parameters");
5601 return BAD_VALUE;
5602 }
5603
5604 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5605 mParameters);
5606 if (rc < 0) {
5607 LOGE("set_parms failed");
5608 }
 5609 /* reset to zero because the batch has been queued */
5610 mToBeQueuedVidBufs = 0;
5611 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5612 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5613 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
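      // Batch not yet full: only accumulate this frame's streams into the batched
      // stream array; set_parms is deferred until the batch completes.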
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005614 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5615 uint32_t m = 0;
5616 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5617 if (streamsArray.stream_request[k].streamID ==
5618 mBatchedStreamsArray.stream_request[m].streamID)
5619 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005620 }
5621 if (m == mBatchedStreamsArray.num_streams) {
5622 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5623 streamID = streamsArray.stream_request[k].streamID;
5624 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5625 buf_index = streamsArray.stream_request[k].buf_index;
5626 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5627 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005628 }
5629 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005630 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005632 }
5633
5634 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5635
5636 mState = STARTED;
 5637 // Wait (with a timeout) until the number of in-flight requests drops below the limit
5638 struct timespec ts;
5639 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005640 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 if (rc < 0) {
5642 isValidTimeout = 0;
5643 LOGE("Error reading the real time clock!!");
5644 }
5645 else {
 5646 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005647 int64_t timeout = 5;
5648 {
5649 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5650 // If there is a pending HDR+ request, the following requests may be blocked until the
5651 // HDR+ request is done. So allow a longer timeout.
5652 if (mHdrPlusPendingRequests.size() > 0) {
5653 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5654 }
5655 }
5656 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 }
5658 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005659 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 (mState != ERROR) && (mState != DEINIT)) {
5661 if (!isValidTimeout) {
5662 LOGD("Blocking on conditional wait");
5663 pthread_cond_wait(&mRequestCond, &mMutex);
5664 }
5665 else {
5666 LOGD("Blocking on timed conditional wait");
5667 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5668 if (rc == ETIMEDOUT) {
5669 rc = -ENODEV;
5670 LOGE("Unblocked on timeout!!!!");
5671 break;
5672 }
5673 }
5674 LOGD("Unblocked");
5675 if (mWokenUpByDaemon) {
5676 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005677 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 break;
5679 }
5680 }
5681 pthread_mutex_unlock(&mMutex);
5682
5683 return rc;
5684}
5685
5686/*===========================================================================
5687 * FUNCTION : dump
5688 *
 5689 * DESCRIPTION: Dump pending request/buffer state of the HAL to the given fd
 5690 *
 5691 * PARAMETERS :
 5692 *   @fd : file descriptor to write the dump to
 5693 *
 5694 * RETURN : None
5695 *==========================================================================*/
5696void QCamera3HardwareInterface::dump(int fd)
5697{
5698 pthread_mutex_lock(&mMutex);
5699 dprintf(fd, "\n Camera HAL3 information Begin \n");
5700
5701 dprintf(fd, "\nNumber of pending requests: %zu \n",
5702 mPendingRequestsList.size());
5703 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5704 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5705 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5706 for(pendingRequestIterator i = mPendingRequestsList.begin();
5707 i != mPendingRequestsList.end(); i++) {
5708 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5709 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5710 i->input_buffer);
5711 }
5712 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5713 mPendingBuffersMap.get_num_overall_buffers());
5714 dprintf(fd, "-------+------------------\n");
5715 dprintf(fd, " Frame | Stream type mask \n");
5716 dprintf(fd, "-------+------------------\n");
5717 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5718 for(auto &j : req.mPendingBufferList) {
5719 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5720 dprintf(fd, " %5d | %11d \n",
5721 req.frame_number, channel->getStreamTypeMask());
5722 }
5723 }
5724 dprintf(fd, "-------+------------------\n");
5725
5726 dprintf(fd, "\nPending frame drop list: %zu\n",
5727 mPendingFrameDropList.size());
5728 dprintf(fd, "-------+-----------\n");
5729 dprintf(fd, " Frame | Stream ID \n");
5730 dprintf(fd, "-------+-----------\n");
5731 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5732 i != mPendingFrameDropList.end(); i++) {
5733 dprintf(fd, " %5d | %9d \n",
5734 i->frame_number, i->stream_ID);
5735 }
5736 dprintf(fd, "-------+-----------\n");
5737
5738 dprintf(fd, "\n Camera HAL3 information End \n");
5739
5740 /* use dumpsys media.camera as trigger to send update debug level event */
5741 mUpdateDebugLevel = true;
5742 pthread_mutex_unlock(&mMutex);
5743 return;
5744}
5745
5746/*===========================================================================
5747 * FUNCTION : flush
5748 *
5749 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5750 * conditionally restarts channels
5751 *
5752 * PARAMETERS :
5753 * @ restartChannels: re-start all channels
5754 *
5755 *
5756 * RETURN :
5757 * 0 on success
5758 * Error code on failure
5759 *==========================================================================*/
5760int QCamera3HardwareInterface::flush(bool restartChannels)
5761{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005762 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005763 int32_t rc = NO_ERROR;
5764
5765 LOGD("Unblocking Process Capture Request");
5766 pthread_mutex_lock(&mMutex);
5767 mFlush = true;
5768 pthread_mutex_unlock(&mMutex);
5769
5770 rc = stopAllChannels();
 5771 // Unlink the dual camera bundle if the devices were linked
5772 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005773 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5774 &m_pDualCamCmdPtr->bundle_info;
5775 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005776 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5777 pthread_mutex_lock(&gCamLock);
5778
5779 if (mIsMainCamera == 1) {
5780 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5781 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005782 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005783 // related session id should be session id of linked session
5784 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5785 } else {
5786 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5787 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005788 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005789 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5790 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005791 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005792 pthread_mutex_unlock(&gCamLock);
5793
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005794 rc = mCameraHandle->ops->set_dual_cam_cmd(
5795 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005796 if (rc < 0) {
5797 LOGE("Dualcam: Unlink failed, but still proceed to close");
5798 }
5799 }
5800
5801 if (rc < 0) {
5802 LOGE("stopAllChannels failed");
5803 return rc;
5804 }
5805 if (mChannelHandle) {
5806 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5807 mChannelHandle);
5808 }
5809
5810 // Reset bundle info
5811 rc = setBundleInfo();
5812 if (rc < 0) {
5813 LOGE("setBundleInfo failed %d", rc);
5814 return rc;
5815 }
5816
5817 // Mutex Lock
5818 pthread_mutex_lock(&mMutex);
5819
5820 // Unblock process_capture_request
5821 mPendingLiveRequest = 0;
5822 pthread_cond_signal(&mRequestCond);
5823
5824 rc = notifyErrorForPendingRequests();
5825 if (rc < 0) {
5826 LOGE("notifyErrorForPendingRequests failed");
5827 pthread_mutex_unlock(&mMutex);
5828 return rc;
5829 }
5830
5831 mFlush = false;
5832
5833 // Start the Streams/Channels
5834 if (restartChannels) {
5835 rc = startAllChannels();
5836 if (rc < 0) {
5837 LOGE("startAllChannels failed");
5838 pthread_mutex_unlock(&mMutex);
5839 return rc;
5840 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005841 if (mChannelHandle) {
5842 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5843 mChannelHandle);
5844 if (rc < 0) {
5845 LOGE("start_channel failed");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005849 }
5850 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005851 pthread_mutex_unlock(&mMutex);
5852
5853 return 0;
5854}
5855
5856/*===========================================================================
5857 * FUNCTION : flushPerf
5858 *
 5859 * DESCRIPTION: This is the performance-optimized version of flush that does
 5860 * not use stream off; instead it flushes the backend pipeline
5861 *
5862 * PARAMETERS :
5863 *
5864 *
5865 * RETURN : 0 : success
5866 * -EINVAL: input is malformed (device is not valid)
5867 * -ENODEV: if the device has encountered a serious error
5868 *==========================================================================*/
5869int QCamera3HardwareInterface::flushPerf()
5870{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005871 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005872 int32_t rc = 0;
5873 struct timespec timeout;
5874 bool timed_wait = false;
5875
5876 pthread_mutex_lock(&mMutex);
5877 mFlushPerf = true;
5878 mPendingBuffersMap.numPendingBufsAtFlush =
5879 mPendingBuffersMap.get_num_overall_buffers();
5880 LOGD("Calling flush. Wait for %d buffers to return",
5881 mPendingBuffersMap.numPendingBufsAtFlush);
5882
5883 /* send the flush event to the backend */
5884 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5885 if (rc < 0) {
5886 LOGE("Error in flush: IOCTL failure");
5887 mFlushPerf = false;
5888 pthread_mutex_unlock(&mMutex);
5889 return -ENODEV;
5890 }
5891
5892 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5893 LOGD("No pending buffers in HAL, return flush");
5894 mFlushPerf = false;
5895 pthread_mutex_unlock(&mMutex);
5896 return rc;
5897 }
5898
5899 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005900 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005901 if (rc < 0) {
5902 LOGE("Error reading the real time clock, cannot use timed wait");
5903 } else {
5904 timeout.tv_sec += FLUSH_TIMEOUT;
5905 timed_wait = true;
5906 }
5907
5908 //Block on conditional variable
5909 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5910 LOGD("Waiting on mBuffersCond");
5911 if (!timed_wait) {
5912 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5913 if (rc != 0) {
5914 LOGE("pthread_cond_wait failed due to rc = %s",
5915 strerror(rc));
5916 break;
5917 }
5918 } else {
5919 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5920 if (rc != 0) {
5921 LOGE("pthread_cond_timedwait failed due to rc = %s",
5922 strerror(rc));
5923 break;
5924 }
5925 }
5926 }
5927 if (rc != 0) {
5928 mFlushPerf = false;
5929 pthread_mutex_unlock(&mMutex);
5930 return -ENODEV;
5931 }
5932
5933 LOGD("Received buffers, now safe to return them");
5934
5935 //make sure the channels handle flush
5936 //currently only required for the picture channel to release snapshot resources
5937 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5938 it != mStreamInfo.end(); it++) {
5939 QCamera3Channel *channel = (*it)->channel;
5940 if (channel) {
5941 rc = channel->flush();
5942 if (rc) {
5943 LOGE("Flushing the channels failed with error %d", rc);
 5944 // Even though the channel flush failed, we need to continue and
 5945 // return the buffers we have to the framework; however, the return
 5946 // value will be an error
5947 rc = -ENODEV;
5948 }
5949 }
5950 }
5951
5952 /* notify the frameworks and send errored results */
5953 rc = notifyErrorForPendingRequests();
5954 if (rc < 0) {
5955 LOGE("notifyErrorForPendingRequests failed");
5956 pthread_mutex_unlock(&mMutex);
5957 return rc;
5958 }
5959
5960 //unblock process_capture_request
5961 mPendingLiveRequest = 0;
5962 unblockRequestIfNecessary();
5963
5964 mFlushPerf = false;
5965 pthread_mutex_unlock(&mMutex);
5966 LOGD ("Flush Operation complete. rc = %d", rc);
5967 return rc;
5968}
5969
5970/*===========================================================================
5971 * FUNCTION : handleCameraDeviceError
5972 *
5973 * DESCRIPTION: This function calls internal flush and notifies the error to
5974 * framework and updates the state variable.
5975 *
5976 * PARAMETERS : None
5977 *
5978 * RETURN : NO_ERROR on Success
5979 * Error code on failure
5980 *==========================================================================*/
5981int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5982{
5983 int32_t rc = NO_ERROR;
5984
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005985 {
5986 Mutex::Autolock lock(mFlushLock);
5987 pthread_mutex_lock(&mMutex);
5988 if (mState != ERROR) {
5989 //if mState != ERROR, nothing to be done
5990 pthread_mutex_unlock(&mMutex);
5991 return NO_ERROR;
5992 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005993 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005994
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005995 rc = flush(false /* restart channels */);
5996 if (NO_ERROR != rc) {
5997 LOGE("internal flush to handle mState = ERROR failed");
5998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005999
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006000 pthread_mutex_lock(&mMutex);
6001 mState = DEINIT;
6002 pthread_mutex_unlock(&mMutex);
6003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006004
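    // Report the fatal device error to the framework so it can close the camera
    // device.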
6005 camera3_notify_msg_t notify_msg;
6006 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6007 notify_msg.type = CAMERA3_MSG_ERROR;
6008 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6009 notify_msg.message.error.error_stream = NULL;
6010 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006011 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006012
6013 return rc;
6014}
6015
6016/*===========================================================================
6017 * FUNCTION : captureResultCb
6018 *
6019 * DESCRIPTION: Callback handler for all capture result
6020 * (streams, as well as metadata)
6021 *
6022 * PARAMETERS :
6023 * @metadata : metadata information
6024 * @buffer : actual gralloc buffer to be returned to frameworks.
6025 * NULL if metadata.
6026 *
6027 * RETURN : NONE
6028 *==========================================================================*/
6029void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6030 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6031{
6032 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006033 pthread_mutex_lock(&mMutex);
6034 uint8_t batchSize = mBatchSize;
6035 pthread_mutex_unlock(&mMutex);
6036 if (batchSize) {
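        // HFR batch mode: hand the batched metadata buffer to handleBatchMetadata,
        // which also frees it (free_and_bufdone_meta_buf is true).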
Thierry Strudel3d639192016-09-09 11:52:26 -07006037 handleBatchMetadata(metadata_buf,
6038 true /* free_and_bufdone_meta_buf */);
6039 } else { /* mBatchSize = 0 */
6040 hdrPlusPerfLock(metadata_buf);
6041 pthread_mutex_lock(&mMutex);
6042 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006043 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006044 false /* first frame of batch metadata */ ,
6045 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006046 pthread_mutex_unlock(&mMutex);
6047 }
6048 } else if (isInputBuffer) {
6049 pthread_mutex_lock(&mMutex);
6050 handleInputBufferWithLock(frame_number);
6051 pthread_mutex_unlock(&mMutex);
6052 } else {
6053 pthread_mutex_lock(&mMutex);
6054 handleBufferWithLock(buffer, frame_number);
6055 pthread_mutex_unlock(&mMutex);
6056 }
6057 return;
6058}
6059
6060/*===========================================================================
6061 * FUNCTION : getReprocessibleOutputStreamId
6062 *
6063 * DESCRIPTION: Get source output stream id for the input reprocess stream
6064 * based on size and format, which would be the largest
6065 * output stream if an input stream exists.
6066 *
6067 * PARAMETERS :
6068 * @id : return the stream id if found
6069 *
6070 * RETURN : int32_t type of status
6071 * NO_ERROR -- success
 6072 * non-zero failure code
6073 *==========================================================================*/
6074int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6075{
 6076 /* check if any output or bidirectional stream with the same size and format
 6077 exists, and return that stream */
6078 if ((mInputStreamInfo.dim.width > 0) &&
6079 (mInputStreamInfo.dim.height > 0)) {
6080 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6081 it != mStreamInfo.end(); it++) {
6082
6083 camera3_stream_t *stream = (*it)->stream;
6084 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6085 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6086 (stream->format == mInputStreamInfo.format)) {
6087 // Usage flag for an input stream and the source output stream
6088 // may be different.
6089 LOGD("Found reprocessible output stream! %p", *it);
6090 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6091 stream->usage, mInputStreamInfo.usage);
6092
6093 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6094 if (channel != NULL && channel->mStreams[0]) {
6095 id = channel->mStreams[0]->getMyServerID();
6096 return NO_ERROR;
6097 }
6098 }
6099 }
6100 } else {
6101 LOGD("No input stream, so no reprocessible output stream");
6102 }
6103 return NAME_NOT_FOUND;
6104}
6105
6106/*===========================================================================
6107 * FUNCTION : lookupFwkName
6108 *
 6109 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 6110 * make sure the parameter is correctly propagated
6111 *
6112 * PARAMETERS :
6113 * @arr : map between the two enums
6114 * @len : len of the map
6115 * @hal_name : name of the hal_parm to map
6116 *
6117 * RETURN : int type of status
6118 * fwk_name -- success
 6119 * non-zero failure code
6120 *==========================================================================*/
6121template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6122 size_t len, halType hal_name)
6123{
6124
6125 for (size_t i = 0; i < len; i++) {
6126 if (arr[i].hal_name == hal_name) {
6127 return arr[i].fwk_name;
6128 }
6129 }
6130
 6131 /* Not being able to find a matching framework type is not necessarily
 6132 * an error. This happens when mm-camera supports more attributes
 6133 * than the framework does */
6134 LOGH("Cannot find matching framework type");
6135 return NAME_NOT_FOUND;
6136}
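// Typical use (this mirrors the flash mode translation later in this file):
//   int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
//   if (NAME_NOT_FOUND != val) { uint8_t fwk_flashMode = (uint8_t)val; ... }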
6137
6138/*===========================================================================
6139 * FUNCTION : lookupHalName
6140 *
 6141 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 6142 * make sure the parameter is correctly propagated
6143 *
6144 * PARAMETERS :
6145 * @arr : map between the two enums
6146 * @len : len of the map
6147 * @fwk_name : name of the hal_parm to map
6148 *
6149 * RETURN : int32_t type of status
6150 * hal_name -- success
 6151 * non-zero failure code
6152 *==========================================================================*/
6153template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6154 size_t len, fwkType fwk_name)
6155{
6156 for (size_t i = 0; i < len; i++) {
6157 if (arr[i].fwk_name == fwk_name) {
6158 return arr[i].hal_name;
6159 }
6160 }
6161
6162 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6163 return NAME_NOT_FOUND;
6164}
6165
6166/*===========================================================================
6167 * FUNCTION : lookupProp
6168 *
6169 * DESCRIPTION: lookup a value by its name
6170 *
6171 * PARAMETERS :
6172 * @arr : map between the two enums
6173 * @len : size of the map
6174 * @name : name to be looked up
6175 *
6176 * RETURN : Value if found
6177 * CAM_CDS_MODE_MAX if not found
6178 *==========================================================================*/
6179template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6180 size_t len, const char *name)
6181{
6182 if (name) {
6183 for (size_t i = 0; i < len; i++) {
6184 if (!strcmp(arr[i].desc, name)) {
6185 return arr[i].val;
6186 }
6187 }
6188 }
6189 return CAM_CDS_MODE_MAX;
6190}
6191
6192/*===========================================================================
 6193 * FUNCTION   : translateFromHalMetadata
 6194 *
 6195 * DESCRIPTION: Translate metadata from HAL format to framework (fwk) format
6196 * PARAMETERS :
6197 * @metadata : metadata information from callback
6198 * @timestamp: metadata buffer timestamp
6199 * @request_id: request id
6200 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006201 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006202 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6203 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006204 * @pprocDone: whether internal offline postprocessing is done
6205 *
6206 * RETURN : camera_metadata_t*
6207 * metadata in a format specified by fwk
6208 *==========================================================================*/
6209camera_metadata_t*
6210QCamera3HardwareInterface::translateFromHalMetadata(
6211 metadata_buffer_t *metadata,
6212 nsecs_t timestamp,
6213 int32_t request_id,
6214 const CameraMetadata& jpegMetadata,
6215 uint8_t pipeline_depth,
6216 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006217 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006218 /* DevCamDebug metadata translateFromHalMetadata argument */
6219 uint8_t DevCamDebug_meta_enable,
6220 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006222 uint8_t fwk_cacMode,
6223 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006224{
6225 CameraMetadata camMetadata;
6226 camera_metadata_t *resultMetadata;
6227
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006228 if (mBatchSize && !firstMetadataInBatch) {
6229 /* In batch mode, use cached metadata from the first metadata
6230 in the batch */
6231 camMetadata.clear();
6232 camMetadata = mCachedMetadata;
6233 }
6234
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 if (jpegMetadata.entryCount())
6236 camMetadata.append(jpegMetadata);
6237
6238 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6239 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6240 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6241 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006242 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006243 if (mBatchSize == 0) {
6244 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6245 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6246 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006247
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006248 if (mBatchSize && !firstMetadataInBatch) {
6249 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6250 resultMetadata = camMetadata.release();
6251 return resultMetadata;
6252 }
6253
Samuel Ha68ba5172016-12-15 18:41:12 -08006254 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6255 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6256 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
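        // Each IF_META_AVAILABLE block below copies one DevCamDebug entry from the
        // HAL metadata buffer into the framework result, only when the backend has
        // populated that entry.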
6257 // DevCamDebug metadata translateFromHalMetadata AF
6258 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6259 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6260 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6261 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6262 }
6263 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6264 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6265 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6266 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6267 }
6268 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6269 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6270 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6271 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6272 }
6273 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6274 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6275 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6276 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6277 }
6278 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6279 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6280 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6281 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6282 }
6283 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6284 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6285 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6286 *DevCamDebug_af_monitor_pdaf_target_pos;
6287 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6288 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6289 }
6290 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6291 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6292 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6293 *DevCamDebug_af_monitor_pdaf_confidence;
6294 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6295 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6296 }
6297 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6298 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6299 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6300 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6301 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6302 }
6303 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6304 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6305 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6306 *DevCamDebug_af_monitor_tof_target_pos;
6307 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6308 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6309 }
6310 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6311 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6312 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6313 *DevCamDebug_af_monitor_tof_confidence;
6314 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6315 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6316 }
6317 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6318 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6319 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6320 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6321 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6322 }
6323 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6324 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6325 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6326 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6327 &fwk_DevCamDebug_af_monitor_type_select, 1);
6328 }
6329 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6330 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6331 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6332 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6333 &fwk_DevCamDebug_af_monitor_refocus, 1);
6334 }
6335 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6336 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6337 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6338 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6339 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6340 }
6341 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6342 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6343 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6344 *DevCamDebug_af_search_pdaf_target_pos;
6345 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6346 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6347 }
6348 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6349 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6350 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6351 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6352 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6353 }
6354 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6355 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6356 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6357 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6358 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6359 }
6360 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6361 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6362 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6363 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6364 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6365 }
6366 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6367 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6368 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6369 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6370 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6371 }
6372 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6373 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6374 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6375 *DevCamDebug_af_search_tof_target_pos;
6376 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6377 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6378 }
6379 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6380 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6381 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6382 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6383 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6384 }
6385 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6386 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6387 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6388 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6389 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6390 }
6391 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6392 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6393 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6394 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6395 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6396 }
6397 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6398 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6399 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6400 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6401 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6402 }
6403 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6404 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6405 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6406 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6407 &fwk_DevCamDebug_af_search_type_select, 1);
6408 }
6409 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6410 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6411 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6412 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6413 &fwk_DevCamDebug_af_search_next_pos, 1);
6414 }
6415 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6416 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6417 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6418 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6419 &fwk_DevCamDebug_af_search_target_pos, 1);
6420 }
6421 // DevCamDebug metadata translateFromHalMetadata AEC
6422 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6423 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6424 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6425 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6426 }
6427 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6428 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6429 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6430 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6431 }
6432 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6433 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6434 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6435 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6436 }
6437 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6438 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6439 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6440 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6441 }
6442 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6443 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6444 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6445 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6446 }
6447 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6448 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6449 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6450 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6451 }
6452 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6453 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6454 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6455 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6456 }
6457 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6458 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6459 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6460 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6461 }
Samuel Ha34229982017-02-17 13:51:11 -08006462 // DevCamDebug metadata translateFromHalMetadata zzHDR
6463 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6464 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6465 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6466 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6467 }
6468 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6469 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6470 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6471 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6472 }
6473 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6474 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6475 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6476 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6477 }
6478 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6479 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6480 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6481 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6482 }
6483 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6484 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6485 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6486 *DevCamDebug_aec_hdr_sensitivity_ratio;
6487 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6488 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6489 }
6490 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6491 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6492 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6493 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6494 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6495 }
6496 // DevCamDebug metadata translateFromHalMetadata ADRC
6497 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6498 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6499 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6500 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6501 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6502 }
6503 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6504 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6505 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6506 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6507 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6508 }
6509 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6510 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6511 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6512 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6513 }
6514 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6515 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6516 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6517 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6518 }
6519 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6520 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6521 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6522 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6523 }
6524 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6525 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6526 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6527 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6528 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006529 // DevCamDebug metadata translateFromHalMetadata AWB
6530 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6531 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6532 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6533 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6534 }
6535 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6536 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6537 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6538 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6539 }
6540 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6541 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6542 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6543 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6544 }
6545 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6546 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6547 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6548 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6549 }
6550 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6551 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6552 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6553 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6554 }
6555 }
6556 // atrace_end(ATRACE_TAG_ALWAYS);
6557
Thierry Strudel3d639192016-09-09 11:52:26 -07006558 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6559 int64_t fwk_frame_number = *frame_number;
6560 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6561 }
6562
6563 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6564 int32_t fps_range[2];
6565 fps_range[0] = (int32_t)float_range->min_fps;
6566 fps_range[1] = (int32_t)float_range->max_fps;
6567 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6568 fps_range, 2);
6569 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6570 fps_range[0], fps_range[1]);
6571 }
6572
6573 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6574 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6575 }
6576
6577 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6578 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6579 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6580 *sceneMode);
6581 if (NAME_NOT_FOUND != val) {
6582 uint8_t fwkSceneMode = (uint8_t)val;
6583 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6584 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6585 fwkSceneMode);
6586 }
6587 }
6588
6589 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6590 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6591 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6592 }
6593
6594 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6595 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6596 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6597 }
6598
6599 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6600 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6601 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6602 }
6603
6604 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6605 CAM_INTF_META_EDGE_MODE, metadata) {
6606 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6607 }
6608
6609 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6610 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6611 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6612 }
6613
6614 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6615 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6616 }
6617
6618 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6619 if (0 <= *flashState) {
6620 uint8_t fwk_flashState = (uint8_t) *flashState;
6621 if (!gCamCapability[mCameraId]->flash_available) {
6622 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6623 }
6624 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6625 }
6626 }
6627
6628 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6629 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6630 if (NAME_NOT_FOUND != val) {
6631 uint8_t fwk_flashMode = (uint8_t)val;
6632 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6633 }
6634 }
6635
6636 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6637 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6638 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6639 }
6640
6641 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6642 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6643 }
6644
6645 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6646 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6647 }
6648
6649 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6650 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6651 }
6652
6653 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6654 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6655 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6656 }
6657
6658 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6659 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6660 LOGD("fwk_videoStab = %d", fwk_videoStab);
6661 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6662 } else {
 6663 // Regardless of whether video stabilization is supported, CTS expects the EIS result
 6664 // to be non-NULL, so hardcode the video stab result to OFF mode.
6665 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6666 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006667 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006668 }
6669
6670 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6671 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6672 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6673 }
6674
6675 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6676 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6677 }
6678
Thierry Strudel3d639192016-09-09 11:52:26 -07006679 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6680 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006681 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006682
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006683 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6684 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006685
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006686 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006687 blackLevelAppliedPattern->cam_black_level[0],
6688 blackLevelAppliedPattern->cam_black_level[1],
6689 blackLevelAppliedPattern->cam_black_level[2],
6690 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006691 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6692 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006693
6694#ifndef USE_HAL_3_3
6695 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006696        // Convert the internal 12-bit black level values to the sensor's 10-bit raw
6697        // depth space by dividing by 2^(12-10) = 4.
6698 fwk_blackLevelInd[0] /= 4.0;
6699 fwk_blackLevelInd[1] /= 4.0;
6700 fwk_blackLevelInd[2] /= 4.0;
6701 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006702 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6703 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006704#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006705 }
6706
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006707#ifndef USE_HAL_3_3
6708 // Fixed whitelevel is used by ISP/Sensor
6709 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6710 &gCamCapability[mCameraId]->white_level, 1);
6711#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006712
6713 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6714 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6715 int32_t scalerCropRegion[4];
6716 scalerCropRegion[0] = hScalerCropRegion->left;
6717 scalerCropRegion[1] = hScalerCropRegion->top;
6718 scalerCropRegion[2] = hScalerCropRegion->width;
6719 scalerCropRegion[3] = hScalerCropRegion->height;
6720
6721 // Adjust crop region from sensor output coordinate system to active
6722 // array coordinate system.
6723 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6724 scalerCropRegion[2], scalerCropRegion[3]);
6725
6726 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6727 }
6728
6729 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6730 LOGD("sensorExpTime = %lld", *sensorExpTime);
6731 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6732 }
6733
6734    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6735            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6736        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6737        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6738 }
6739
6740 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6741 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6742 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6743 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6744 sensorRollingShutterSkew, 1);
6745 }
6746
6747 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6748 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6749 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6750
6751 //calculate the noise profile based on sensitivity
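        // Note: per the ANDROID_SENSOR_NOISE_PROFILE definition, noise at a normalized
        // pixel value x is modeled as variance(x) ~= S * x + O, with one (S, O) pair
        // reported per color channel. Here the same pair, derived from the analog
        // sensitivity, is reported for every channel.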
6752 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6753 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6754 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6755 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6756 noise_profile[i] = noise_profile_S;
6757 noise_profile[i+1] = noise_profile_O;
6758 }
6759 LOGD("noise model entry (S, O) is (%f, %f)",
6760 noise_profile_S, noise_profile_O);
6761 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6762 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6763 }
6764
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006765#ifndef USE_HAL_3_3
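    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in the same arithmetic
    // units as ANDROID_SENSOR_SENSITIVITY, where 100 means no additional boost; the
    // value reported below combines the ISP gain with any post-stats scaling factor.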
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006766 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006767 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006768 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006769 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006770 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6771 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6772 }
6773 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006774#endif
6775
Thierry Strudel3d639192016-09-09 11:52:26 -07006776 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6777 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6778 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6779 }
6780
6781 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6782 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6783 *faceDetectMode);
6784 if (NAME_NOT_FOUND != val) {
6785 uint8_t fwk_faceDetectMode = (uint8_t)val;
6786 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6787
6788 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6789 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6790 CAM_INTF_META_FACE_DETECTION, metadata) {
6791 uint8_t numFaces = MIN(
6792 faceDetectionInfo->num_faces_detected, MAX_ROI);
6793 int32_t faceIds[MAX_ROI];
6794 uint8_t faceScores[MAX_ROI];
6795 int32_t faceRectangles[MAX_ROI * 4];
6796 int32_t faceLandmarks[MAX_ROI * 6];
6797 size_t j = 0, k = 0;
6798
6799 for (size_t i = 0; i < numFaces; i++) {
6800 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6801 // Adjust crop region from sensor output coordinate system to active
6802 // array coordinate system.
6803 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6804 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6805 rect.width, rect.height);
6806
6807 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6808 faceRectangles+j, -1);
6809
6810 j+= 4;
6811 }
6812 if (numFaces <= 0) {
6813 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6814 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6815 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6816 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6817 }
6818
6819 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6820 numFaces);
6821 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6822 faceRectangles, numFaces * 4U);
6823 if (fwk_faceDetectMode ==
6824 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6825 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6826 CAM_INTF_META_FACE_LANDMARK, metadata) {
6827
6828 for (size_t i = 0; i < numFaces; i++) {
6829 // Map the co-ordinate sensor output coordinate system to active
6830 // array coordinate system.
6831 mCropRegionMapper.toActiveArray(
6832 landmarks->face_landmarks[i].left_eye_center.x,
6833 landmarks->face_landmarks[i].left_eye_center.y);
6834 mCropRegionMapper.toActiveArray(
6835 landmarks->face_landmarks[i].right_eye_center.x,
6836 landmarks->face_landmarks[i].right_eye_center.y);
6837 mCropRegionMapper.toActiveArray(
6838 landmarks->face_landmarks[i].mouth_center.x,
6839 landmarks->face_landmarks[i].mouth_center.y);
6840
6841 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006842 k+= TOTAL_LANDMARK_INDICES;
6843 }
6844 } else {
6845 for (size_t i = 0; i < numFaces; i++) {
6846 setInvalidLandmarks(faceLandmarks+k);
6847 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006848 }
6849 }
6850
6851 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6852 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6853 faceLandmarks, numFaces * 6U);
6854 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006855 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6856 CAM_INTF_META_FACE_BLINK, metadata) {
6857 uint8_t detected[MAX_ROI];
6858 uint8_t degree[MAX_ROI * 2];
6859 for (size_t i = 0; i < numFaces; i++) {
6860 detected[i] = blinks->blink[i].blink_detected;
6861 degree[2 * i] = blinks->blink[i].left_blink;
6862 degree[2 * i + 1] = blinks->blink[i].right_blink;
6863 }
6864 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6865 detected, numFaces);
6866 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6867 degree, numFaces * 2);
6868 }
6869 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6870 CAM_INTF_META_FACE_SMILE, metadata) {
6871 uint8_t degree[MAX_ROI];
6872 uint8_t confidence[MAX_ROI];
6873 for (size_t i = 0; i < numFaces; i++) {
6874 degree[i] = smiles->smile[i].smile_degree;
6875 confidence[i] = smiles->smile[i].smile_confidence;
6876 }
6877 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6878 degree, numFaces);
6879 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6880 confidence, numFaces);
6881 }
6882 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6883 CAM_INTF_META_FACE_GAZE, metadata) {
6884 int8_t angle[MAX_ROI];
6885 int32_t direction[MAX_ROI * 3];
6886 int8_t degree[MAX_ROI * 2];
6887 for (size_t i = 0; i < numFaces; i++) {
6888 angle[i] = gazes->gaze[i].gaze_angle;
6889 direction[3 * i] = gazes->gaze[i].updown_dir;
6890 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6891 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6892 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6893 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6894 }
6895 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6896 (uint8_t *)angle, numFaces);
6897 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6898 direction, numFaces * 3);
6899 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6900 (uint8_t *)degree, numFaces * 2);
6901 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006902 }
6903 }
6904 }
6905 }
6906
6907 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6908 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08006909 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006910 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08006911 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006912
Shuzhen Wang14415f52016-11-16 18:26:18 -08006913 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
6914 histogramBins = *histBins;
6915 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
6916 }
6917
6918 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006919 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6920 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08006921 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006922
6923 switch (stats_data->type) {
6924 case CAM_HISTOGRAM_TYPE_BAYER:
6925 switch (stats_data->bayer_stats.data_type) {
6926 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006927 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
6928 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006929 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006930 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
6931 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006932 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006933 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
6934 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006935 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006936 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006937 case CAM_STATS_CHANNEL_R:
6938 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006939 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
6940 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006941 }
6942 break;
6943 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08006944 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006945 break;
6946 }
6947
Shuzhen Wang14415f52016-11-16 18:26:18 -08006948 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006949 }
6950 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006951 }
6952
6953 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6954 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6955 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6956 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6957 }
6958
6959 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6960 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6961 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6962 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6963 }
6964
6965 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6966 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
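        // Per the framework definition, the shading map carries four gain factors
        // (one per Bayer channel, in [R, Geven, Godd, B] order) for every grid point,
        // hence the 4U multiplier below.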
6967 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6968 CAM_MAX_SHADING_MAP_HEIGHT);
6969 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6970 CAM_MAX_SHADING_MAP_WIDTH);
6971 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6972 lensShadingMap->lens_shading, 4U * map_width * map_height);
6973 }
6974
6975 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6976 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6977 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6978 }
6979
6980 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6981        // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curves
6982 /* ch0 = G, ch 1 = B, ch 2 = R*/
6983 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6984 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6985 tonemap->tonemap_points_cnt,
6986 CAM_MAX_TONEMAP_CURVE_SIZE);
6987 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6988 }
6989
6990 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6991 &tonemap->curves[0].tonemap_points[0][0],
6992 tonemap->tonemap_points_cnt * 2);
6993
6994 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6995 &tonemap->curves[1].tonemap_points[0][0],
6996 tonemap->tonemap_points_cnt * 2);
6997
6998 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6999 &tonemap->curves[2].tonemap_points[0][0],
7000 tonemap->tonemap_points_cnt * 2);
7001 }
7002
7003 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7004 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7005 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7006 CC_GAIN_MAX);
7007 }
7008
7009 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7010 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7011 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7012 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7013 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7014 }
7015
7016 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7017 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7018 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7019 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7020 toneCurve->tonemap_points_cnt,
7021 CAM_MAX_TONEMAP_CURVE_SIZE);
7022 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7023 }
7024 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7025 (float*)toneCurve->curve.tonemap_points,
7026 toneCurve->tonemap_points_cnt * 2);
7027 }
7028
7029 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7030 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7031 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7032 predColorCorrectionGains->gains, 4);
7033 }
7034
7035 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7036 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7037 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7038 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7039 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7040 }
7041
7042 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7043 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7044 }
7045
7046 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7047 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7048 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7049 }
7050
7051 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7052 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7053 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7054 }
7055
7056 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7057 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7058 *effectMode);
7059 if (NAME_NOT_FOUND != val) {
7060 uint8_t fwk_effectMode = (uint8_t)val;
7061 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7062 }
7063 }
7064
7065 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7066 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7067 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7068 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7069 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7070 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7071 }
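        // ANDROID_SENSOR_TEST_PATTERN_DATA is reported in [R, Geven, Godd, B] order,
        // so the gr/gb channel values are swapped below according to the sensor's
        // CFA arrangement.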
7072 int32_t fwk_testPatternData[4];
7073 fwk_testPatternData[0] = testPatternData->r;
7074 fwk_testPatternData[3] = testPatternData->b;
7075 switch (gCamCapability[mCameraId]->color_arrangement) {
7076 case CAM_FILTER_ARRANGEMENT_RGGB:
7077 case CAM_FILTER_ARRANGEMENT_GRBG:
7078 fwk_testPatternData[1] = testPatternData->gr;
7079 fwk_testPatternData[2] = testPatternData->gb;
7080 break;
7081 case CAM_FILTER_ARRANGEMENT_GBRG:
7082 case CAM_FILTER_ARRANGEMENT_BGGR:
7083 fwk_testPatternData[2] = testPatternData->gr;
7084 fwk_testPatternData[1] = testPatternData->gb;
7085 break;
7086 default:
7087 LOGE("color arrangement %d is not supported",
7088 gCamCapability[mCameraId]->color_arrangement);
7089 break;
7090 }
7091 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7092 }
7093
7094 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7095 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7096 }
7097
7098 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7099 String8 str((const char *)gps_methods);
7100 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7101 }
7102
7103 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7104 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7105 }
7106
7107 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7108 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7109 }
7110
7111 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7112 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7113 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7114 }
7115
7116 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7117 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7118 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7119 }
7120
7121 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7122 int32_t fwk_thumb_size[2];
7123 fwk_thumb_size[0] = thumb_size->width;
7124 fwk_thumb_size[1] = thumb_size->height;
7125 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7126 }
7127
7128 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7129 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7130 privateData,
7131 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7132 }
7133
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007134 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007135 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007136 meteringMode, 1);
7137 }
7138
Thierry Strudel54dc9782017-02-15 12:12:10 -08007139 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7140 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7141 LOGD("hdr_scene_data: %d %f\n",
7142 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7143 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7144 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7145 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7146 &isHdr, 1);
7147 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7148 &isHdrConfidence, 1);
7149 }
7150
7151
7152
Thierry Strudel3d639192016-09-09 11:52:26 -07007153 if (metadata->is_tuning_params_valid) {
7154 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
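        // Layout of the tuning blob serialized below (all size fields are uint32_t):
        //   [version][sensor_size][vfe_size][cpp_size][cac_size][mod3_size(=0)]
        //   followed by the sensor, VFE, CPP and CAC tuning payloads, each truncated
        //   to its corresponding *_DATA_MAX limit.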
7155 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7156 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7157
7158
7159 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7160 sizeof(uint32_t));
7161 data += sizeof(uint32_t);
7162
7163 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7164 sizeof(uint32_t));
7165 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7166 data += sizeof(uint32_t);
7167
7168 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7169 sizeof(uint32_t));
7170 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7171 data += sizeof(uint32_t);
7172
7173 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7174 sizeof(uint32_t));
7175 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7176 data += sizeof(uint32_t);
7177
7178 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7179 sizeof(uint32_t));
7180 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7181 data += sizeof(uint32_t);
7182
7183 metadata->tuning_params.tuning_mod3_data_size = 0;
7184 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7185 sizeof(uint32_t));
7186 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7187 data += sizeof(uint32_t);
7188
7189 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7190 TUNING_SENSOR_DATA_MAX);
7191 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7192 count);
7193 data += count;
7194
7195 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7196 TUNING_VFE_DATA_MAX);
7197 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7198 count);
7199 data += count;
7200
7201 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7202 TUNING_CPP_DATA_MAX);
7203 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7204 count);
7205 data += count;
7206
7207 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7208 TUNING_CAC_DATA_MAX);
7209 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7210 count);
7211 data += count;
7212
7213 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7214 (int32_t *)(void *)tuning_meta_data_blob,
7215 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7216 }
7217
7218 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7219 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7220 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7221 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7222 NEUTRAL_COL_POINTS);
7223 }
7224
7225 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7226 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7227 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7228 }
7229
7230 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7231 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7232 // Adjust crop region from sensor output coordinate system to active
7233 // array coordinate system.
7234 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7235 hAeRegions->rect.width, hAeRegions->rect.height);
7236
7237 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7238 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7239 REGIONS_TUPLE_COUNT);
7240 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7241 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7242 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7243 hAeRegions->rect.height);
7244 }
7245
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007246 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7247 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7248 if (NAME_NOT_FOUND != val) {
7249 uint8_t fwkAfMode = (uint8_t)val;
7250 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7251 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7252 } else {
7253 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7254 val);
7255 }
7256 }
7257
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7259 uint8_t fwk_afState = (uint8_t) *afState;
7260 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007261 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007262 }
7263
7264 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7265 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7266 }
7267
7268 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7269 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7270 }
7271
7272 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7273 uint8_t fwk_lensState = *lensState;
7274 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7275 }
7276
7277 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7278 /*af regions*/
7279 int32_t afRegions[REGIONS_TUPLE_COUNT];
7280 // Adjust crop region from sensor output coordinate system to active
7281 // array coordinate system.
7282 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7283 hAfRegions->rect.width, hAfRegions->rect.height);
7284
7285 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7286 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7287 REGIONS_TUPLE_COUNT);
7288 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7289 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7290 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7291 hAfRegions->rect.height);
7292 }
7293
7294 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007295 uint32_t ab_mode = *hal_ab_mode;
7296 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7297 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7298 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7299 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007300 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007301 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007302 if (NAME_NOT_FOUND != val) {
7303 uint8_t fwk_ab_mode = (uint8_t)val;
7304 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7305 }
7306 }
7307
7308 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7309 int val = lookupFwkName(SCENE_MODES_MAP,
7310 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7311 if (NAME_NOT_FOUND != val) {
7312 uint8_t fwkBestshotMode = (uint8_t)val;
7313 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7314 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7315 } else {
7316 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7317 }
7318 }
7319
7320 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7321 uint8_t fwk_mode = (uint8_t) *mode;
7322 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7323 }
7324
7325    /* Constant metadata values to be updated */
7326 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7327 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7328
7329 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7330 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7331
7332 int32_t hotPixelMap[2];
7333 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7334
7335 // CDS
7336 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7337 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7338 }
7339
Thierry Strudel04e026f2016-10-10 11:27:36 -07007340 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7341 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007342 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007343 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7344 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7345 } else {
7346 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7347 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007348
7349 if(fwk_hdr != curr_hdr_state) {
7350 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7351 if(fwk_hdr)
7352 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7353 else
7354 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7355 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007356 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7357 }
7358
Thierry Strudel54dc9782017-02-15 12:12:10 -08007359 //binning correction
7360 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7361 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7362 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7363 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7364 }
7365
Thierry Strudel04e026f2016-10-10 11:27:36 -07007366 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007367 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007368 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7369 int8_t is_ir_on = 0;
7370
7371 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7372 if(is_ir_on != curr_ir_state) {
7373 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7374 if(is_ir_on)
7375 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7376 else
7377 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7378 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007379 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007380 }
7381
Thierry Strudel269c81a2016-10-12 12:13:59 -07007382 // AEC SPEED
7383 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7384 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7385 }
7386
7387 // AWB SPEED
7388 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7389 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7390 }
7391
Thierry Strudel3d639192016-09-09 11:52:26 -07007392 // TNR
7393 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7394 uint8_t tnr_enable = tnr->denoise_enable;
7395 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007396 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7397 int8_t is_tnr_on = 0;
7398
7399 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7400 if(is_tnr_on != curr_tnr_state) {
7401 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7402 if(is_tnr_on)
7403 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7404 else
7405 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7406 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007407
7408 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7409 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7410 }
7411
7412 // Reprocess crop data
7413 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7414 uint8_t cnt = crop_data->num_of_streams;
7415 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7416            // mm-qcamera-daemon only posts crop_data for streams
7417            // not linked to pproc, so the absence of valid crop metadata is
7418            // not necessarily an error case.
7419 LOGD("No valid crop metadata entries");
7420 } else {
7421 uint32_t reproc_stream_id;
7422 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7423 LOGD("No reprocessible stream found, ignore crop data");
7424 } else {
7425 int rc = NO_ERROR;
7426 Vector<int32_t> roi_map;
7427 int32_t *crop = new int32_t[cnt*4];
7428 if (NULL == crop) {
7429 rc = NO_MEMORY;
7430 }
7431 if (NO_ERROR == rc) {
7432 int32_t streams_found = 0;
7433 for (size_t i = 0; i < cnt; i++) {
7434 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7435 if (pprocDone) {
7436 // HAL already does internal reprocessing,
7437 // either via reprocessing before JPEG encoding,
7438 // or offline postprocessing for pproc bypass case.
7439 crop[0] = 0;
7440 crop[1] = 0;
7441 crop[2] = mInputStreamInfo.dim.width;
7442 crop[3] = mInputStreamInfo.dim.height;
7443 } else {
7444 crop[0] = crop_data->crop_info[i].crop.left;
7445 crop[1] = crop_data->crop_info[i].crop.top;
7446 crop[2] = crop_data->crop_info[i].crop.width;
7447 crop[3] = crop_data->crop_info[i].crop.height;
7448 }
7449 roi_map.add(crop_data->crop_info[i].roi_map.left);
7450 roi_map.add(crop_data->crop_info[i].roi_map.top);
7451 roi_map.add(crop_data->crop_info[i].roi_map.width);
7452 roi_map.add(crop_data->crop_info[i].roi_map.height);
7453 streams_found++;
7454 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7455 crop[0], crop[1], crop[2], crop[3]);
7456 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7457 crop_data->crop_info[i].roi_map.left,
7458 crop_data->crop_info[i].roi_map.top,
7459 crop_data->crop_info[i].roi_map.width,
7460 crop_data->crop_info[i].roi_map.height);
7461 break;
7462
7463 }
7464 }
7465 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7466 &streams_found, 1);
7467 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7468 crop, (size_t)(streams_found * 4));
7469 if (roi_map.array()) {
7470 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7471 roi_map.array(), roi_map.size());
7472 }
7473 }
7474 if (crop) {
7475 delete [] crop;
7476 }
7477 }
7478 }
7479 }
7480
7481 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7482 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7483 // so hardcoding the CAC result to OFF mode.
7484 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7485 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7486 } else {
7487 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7488 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7489 *cacMode);
7490 if (NAME_NOT_FOUND != val) {
7491 uint8_t resultCacMode = (uint8_t)val;
7492 // check whether CAC result from CB is equal to Framework set CAC mode
7493 // If not equal then set the CAC mode came in corresponding request
7494 if (fwk_cacMode != resultCacMode) {
7495 resultCacMode = fwk_cacMode;
7496 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007497 //Check if CAC is disabled by property
7498 if (m_cacModeDisabled) {
7499 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7500 }
7501
Thierry Strudel3d639192016-09-09 11:52:26 -07007502 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7503 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7504 } else {
7505 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7506 }
7507 }
7508 }
7509
7510 // Post blob of cam_cds_data through vendor tag.
7511 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7512 uint8_t cnt = cdsInfo->num_of_streams;
7513 cam_cds_data_t cdsDataOverride;
7514 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7515 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7516 cdsDataOverride.num_of_streams = 1;
7517 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7518 uint32_t reproc_stream_id;
7519 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7520 LOGD("No reprocessible stream found, ignore cds data");
7521 } else {
7522 for (size_t i = 0; i < cnt; i++) {
7523 if (cdsInfo->cds_info[i].stream_id ==
7524 reproc_stream_id) {
7525 cdsDataOverride.cds_info[0].cds_enable =
7526 cdsInfo->cds_info[i].cds_enable;
7527 break;
7528 }
7529 }
7530 }
7531 } else {
7532 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7533 }
7534 camMetadata.update(QCAMERA3_CDS_INFO,
7535 (uint8_t *)&cdsDataOverride,
7536 sizeof(cam_cds_data_t));
7537 }
7538
7539 // Ldaf calibration data
7540 if (!mLdafCalibExist) {
7541 IF_META_AVAILABLE(uint32_t, ldafCalib,
7542 CAM_INTF_META_LDAF_EXIF, metadata) {
7543 mLdafCalibExist = true;
7544 mLdafCalib[0] = ldafCalib[0];
7545 mLdafCalib[1] = ldafCalib[1];
7546 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7547 ldafCalib[0], ldafCalib[1]);
7548 }
7549 }
7550
Thierry Strudel54dc9782017-02-15 12:12:10 -08007551 // EXIF debug data through vendor tag
7552 /*
7553 * Mobicat Mask can assume 3 values:
7554 * 1 refers to Mobicat data,
7555 * 2 refers to Stats Debug and Exif Debug Data
7556 * 3 refers to Mobicat and Stats Debug Data
7557 * We want to make sure that we are sending Exif debug data
7558 * only when Mobicat Mask is 2.
7559 */
7560 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7561 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7562 (uint8_t *)(void *)mExifParams.debug_params,
7563 sizeof(mm_jpeg_debug_exif_params_t));
7564 }
7565
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007566 // Reprocess and DDM debug data through vendor tag
7567 cam_reprocess_info_t repro_info;
7568 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007569 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7570 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007571 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007572 }
7573 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7574 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007575 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007576 }
7577 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7578 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007579 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007580 }
7581 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7582 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007583 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 }
7585 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7586 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007587 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 }
7589 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007590 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007591 }
7592 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7593 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007594 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007595 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007596 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7597 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7598 }
7599 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7600 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7601 }
7602 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7603 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007604
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007605 // INSTANT AEC MODE
7606 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7607 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7608 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7609 }
7610
Shuzhen Wange763e802016-03-31 10:24:29 -07007611 // AF scene change
7612 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7613 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7614 }
7615
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007616 /* In batch mode, cache the first metadata in the batch */
7617 if (mBatchSize && firstMetadataInBatch) {
7618 mCachedMetadata.clear();
7619 mCachedMetadata = camMetadata;
7620 }
7621
Thierry Strudel3d639192016-09-09 11:52:26 -07007622 resultMetadata = camMetadata.release();
7623 return resultMetadata;
7624}
7625
7626/*===========================================================================
7627 * FUNCTION : saveExifParams
7628 *
7629 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata callback
7630 *
7631 * PARAMETERS :
7632 * @metadata : metadata information from callback
7633 *
7634 * RETURN : none
7635 *
7636 *==========================================================================*/
7637void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7638{
7639 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7640 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7641 if (mExifParams.debug_params) {
7642 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7643 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7644 }
7645 }
7646 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7647 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7648 if (mExifParams.debug_params) {
7649 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7650 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7651 }
7652 }
7653 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7654 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7655 if (mExifParams.debug_params) {
7656 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7657 mExifParams.debug_params->af_debug_params_valid = TRUE;
7658 }
7659 }
7660 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7661 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7662 if (mExifParams.debug_params) {
7663 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7664 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7665 }
7666 }
7667 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7668 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7669 if (mExifParams.debug_params) {
7670 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7671 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7672 }
7673 }
7674 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7675 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7676 if (mExifParams.debug_params) {
7677 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7678 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7679 }
7680 }
7681 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7682 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7683 if (mExifParams.debug_params) {
7684 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7685 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7686 }
7687 }
7688 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7689 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7690 if (mExifParams.debug_params) {
7691 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7692 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7693 }
7694 }
7695}
7696
7697/*===========================================================================
7698 * FUNCTION : get3AExifParams
7699 *
7700 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
7701 *
7702 * PARAMETERS : none
7703 *
7704 *
7705 * RETURN : mm_jpeg_exif_params_t
7706 *
7707 *==========================================================================*/
7708mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7709{
7710 return mExifParams;
7711}
7712
7713/*===========================================================================
7714 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7715 *
7716 * DESCRIPTION: Translate urgent (early partial result) 3A metadata from the backend
7716 *              into framework result metadata
7717 *
7718 * PARAMETERS :
7719 * @metadata : metadata information from callback
7720 *
7721 * RETURN : camera_metadata_t*
7722 * metadata in a format specified by fwk
7723 *==========================================================================*/
7724camera_metadata_t*
7725QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7726 (metadata_buffer_t *metadata)
7727{
7728 CameraMetadata camMetadata;
7729 camera_metadata_t *resultMetadata;
7730
7731
7732 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7733 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7734 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7735 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7736 }
7737
7738 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7739 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7740 &aecTrigger->trigger, 1);
7741 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7742 &aecTrigger->trigger_id, 1);
7743 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7744 aecTrigger->trigger);
7745 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7746 aecTrigger->trigger_id);
7747 }
7748
7749 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7750 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7751 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7752 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7753 }
7754
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7756 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7757 &af_trigger->trigger, 1);
7758 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7759 af_trigger->trigger);
7760 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7761 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7762 af_trigger->trigger_id);
7763 }
7764
7765 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7766 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7767 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7768 if (NAME_NOT_FOUND != val) {
7769 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7770 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7771 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7772 } else {
7773 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7774 }
7775 }
7776
7777 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7778 uint32_t aeMode = CAM_AE_MODE_MAX;
7779 int32_t flashMode = CAM_FLASH_MODE_MAX;
7780 int32_t redeye = -1;
7781 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7782 aeMode = *pAeMode;
7783 }
7784 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7785 flashMode = *pFlashMode;
7786 }
7787 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7788 redeye = *pRedeye;
7789 }
7790
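    // Deduce ANDROID_CONTROL_AE_MODE with the following precedence: red-eye reduction
    // first, then auto/on flash modes mapped through AE_FLASH_MODE_MAP, and finally
    // the plain AE on/off state.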
7791 if (1 == redeye) {
7792 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7793 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7794 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7795 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7796 flashMode);
7797 if (NAME_NOT_FOUND != val) {
7798 fwk_aeMode = (uint8_t)val;
7799 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7800 } else {
7801 LOGE("Unsupported flash mode %d", flashMode);
7802 }
7803 } else if (aeMode == CAM_AE_MODE_ON) {
7804 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7805 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7806 } else if (aeMode == CAM_AE_MODE_OFF) {
7807 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7808 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7809 } else {
7810 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7811 "flashMode:%d, aeMode:%u!!!",
7812 redeye, flashMode, aeMode);
7813 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007814 if (mInstantAEC) {
7815        // Increment the frame index count until it reaches the bound for instant AEC.
7816 mInstantAecFrameIdxCount++;
7817 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7818 CAM_INTF_META_AEC_INFO, metadata) {
7819 LOGH("ae_params->settled = %d",ae_params->settled);
7820 // If AEC settled, or if number of frames reached bound value,
7821 // should reset instant AEC.
7822 if (ae_params->settled ||
7823 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7824 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7825 mInstantAEC = false;
7826 mResetInstantAEC = true;
7827 mInstantAecFrameIdxCount = 0;
7828 }
7829 }
7830 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007831 resultMetadata = camMetadata.release();
7832 return resultMetadata;
7833}
7834
7835/*===========================================================================
7836 * FUNCTION : dumpMetadataToFile
7837 *
7838 * DESCRIPTION: Dumps tuning metadata to file system
7839 *
7840 * PARAMETERS :
7841 * @meta : tuning metadata
7842 * @dumpFrameCount : current dump frame count
7843 * @enabled : Enable mask
7844 *
7845 *==========================================================================*/
7846void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7847 uint32_t &dumpFrameCount,
7848 bool enabled,
7849 const char *type,
7850 uint32_t frameNumber)
7851{
7852 //Some sanity checks
7853 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7854 LOGE("Tuning sensor data size bigger than expected %d: %d",
7855 meta.tuning_sensor_data_size,
7856 TUNING_SENSOR_DATA_MAX);
7857 return;
7858 }
7859
7860 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7861 LOGE("Tuning VFE data size bigger than expected %d: %d",
7862 meta.tuning_vfe_data_size,
7863 TUNING_VFE_DATA_MAX);
7864 return;
7865 }
7866
7867 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7868 LOGE("Tuning CPP data size bigger than expected %d: %d",
7869 meta.tuning_cpp_data_size,
7870 TUNING_CPP_DATA_MAX);
7871 return;
7872 }
7873
7874 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7875 LOGE("Tuning CAC data size bigger than expected %d: %d",
7876 meta.tuning_cac_data_size,
7877 TUNING_CAC_DATA_MAX);
7878 return;
7879 }
7880 //
7881
7882 if(enabled){
7883 char timeBuf[FILENAME_MAX];
7884 char buf[FILENAME_MAX];
7885 memset(buf, 0, sizeof(buf));
7886 memset(timeBuf, 0, sizeof(timeBuf));
7887 time_t current_time;
7888 struct tm * timeinfo;
7889 time (&current_time);
7890 timeinfo = localtime (&current_time);
7891 if (timeinfo != NULL) {
7892 strftime (timeBuf, sizeof(timeBuf),
7893 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7894 }
7895 String8 filePath(timeBuf);
7896 snprintf(buf,
7897 sizeof(buf),
7898 "%dm_%s_%d.bin",
7899 dumpFrameCount,
7900 type,
7901 frameNumber);
7902 filePath.append(buf);
7903 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7904 if (file_fd >= 0) {
7905 ssize_t written_len = 0;
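            // The dump file mirrors the QCAMERA3_TUNING_META_DATA_BLOB layout: a uint32_t
            // version followed by five uint32_t size fields, then the sensor, VFE, CPP
            // and CAC tuning payloads.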
7906 meta.tuning_data_version = TUNING_DATA_VERSION;
7907 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7908 written_len += write(file_fd, data, sizeof(uint32_t));
7909 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7910 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7911 written_len += write(file_fd, data, sizeof(uint32_t));
7912 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7913 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7914 written_len += write(file_fd, data, sizeof(uint32_t));
7915 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7916 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7917 written_len += write(file_fd, data, sizeof(uint32_t));
7918 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7919 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7920 written_len += write(file_fd, data, sizeof(uint32_t));
7921 meta.tuning_mod3_data_size = 0;
7922 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7923 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7924 written_len += write(file_fd, data, sizeof(uint32_t));
7925 size_t total_size = meta.tuning_sensor_data_size;
7926 data = (void *)((uint8_t *)&meta.data);
7927 written_len += write(file_fd, data, total_size);
7928 total_size = meta.tuning_vfe_data_size;
7929 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7930 written_len += write(file_fd, data, total_size);
7931 total_size = meta.tuning_cpp_data_size;
7932 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7933 written_len += write(file_fd, data, total_size);
7934 total_size = meta.tuning_cac_data_size;
7935 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7936 written_len += write(file_fd, data, total_size);
7937 close(file_fd);
7938        } else {
7939 LOGE("fail to open file for metadata dumping");
7940 }
7941 }
7942}
7943
7944/*===========================================================================
7945 * FUNCTION : cleanAndSortStreamInfo
7946 *
7947 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7948 *              and sort them such that the raw stream is at the end of the list.
7949 *              This is a workaround for a camera daemon constraint.
7950 *
7951 * PARAMETERS : None
7952 *
7953 *==========================================================================*/
7954void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7955{
7956 List<stream_info_t *> newStreamInfo;
7957
7958 /*clean up invalid streams*/
7959 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7960 it != mStreamInfo.end();) {
7961 if(((*it)->status) == INVALID){
7962 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7963 delete channel;
7964 free(*it);
7965 it = mStreamInfo.erase(it);
7966 } else {
7967 it++;
7968 }
7969 }
7970
7971 // Move preview/video/callback/snapshot streams into newList
7972 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7973 it != mStreamInfo.end();) {
7974 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7975 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7976 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7977 newStreamInfo.push_back(*it);
7978 it = mStreamInfo.erase(it);
7979 } else
7980 it++;
7981 }
7982 // Move raw streams into newList
7983 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7984 it != mStreamInfo.end();) {
7985 newStreamInfo.push_back(*it);
7986 it = mStreamInfo.erase(it);
7987 }
7988
7989 mStreamInfo = newStreamInfo;
7990}
7991
7992/*===========================================================================
7993 * FUNCTION : extractJpegMetadata
7994 *
7995 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7996 * JPEG metadata is cached in the HAL and returned as part of the capture
7997 * result when metadata is returned from the camera daemon.
7998 *
7999 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8000 * @request: capture request
8001 *
8002 *==========================================================================*/
8003void QCamera3HardwareInterface::extractJpegMetadata(
8004 CameraMetadata& jpegMetadata,
8005 const camera3_capture_request_t *request)
8006{
8007 CameraMetadata frame_settings;
8008 frame_settings = request->settings;
8009
8010 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8011 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8012 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8013 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8014
8015 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8016 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8017 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8018 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8019
8020 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8021 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8022 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8023 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8024
8025 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8026 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8027 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8028 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8029
8030 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8031 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8032 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8033 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8034
8035 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8036 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8037 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8038 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8039
8040 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8041 int32_t thumbnail_size[2];
8042 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8043 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8044 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8045 int32_t orientation =
8046 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008047 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008048 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8049 int32_t temp;
8050 temp = thumbnail_size[0];
8051 thumbnail_size[0] = thumbnail_size[1];
8052 thumbnail_size[1] = temp;
8053 }
8054 }
8055 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8056 thumbnail_size,
8057 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8058 }
8059
8060}
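/* Illustrative sketch (made-up request values): thumbnail handling in
 * extractJpegMetadata(). With ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240},
 * ANDROID_JPEG_ORIENTATION = 90 and needJpegExifRotation() returning false,
 * the cached jpegMetadata carries the swapped size {240, 320}; for
 * orientation 0 or 180 the size is cached unchanged.
 */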
8061
8062/*===========================================================================
8063 * FUNCTION : convertToRegions
8064 *
8065 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8066 *
8067 * PARAMETERS :
8068 * @rect : cam_rect_t struct to convert
8069 * @region : int32_t destination array
8070 * @weight : if we are converting from cam_area_t, weight is valid
8071 * else weight = -1
8072 *
8073 *==========================================================================*/
8074void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8075 int32_t *region, int weight)
8076{
8077 region[0] = rect.left;
8078 region[1] = rect.top;
8079 region[2] = rect.left + rect.width;
8080 region[3] = rect.top + rect.height;
8081 if (weight > -1) {
8082 region[4] = weight;
8083 }
8084}
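/* Illustrative sketch (example numbers only): convertToRegions() maps a
 * cam_rect_t into the framework's [xmin, ymin, xmax, ymax, weight] layout.
 *
 *   cam_rect_t rect;
 *   rect.left = 100; rect.top = 200; rect.width = 300; rect.height = 400;
 *   int32_t region[5];
 *   convertToRegions(rect, region, 1);
 *   // region == {100, 200, 400, 600, 1}
 */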
8085
8086/*===========================================================================
8087 * FUNCTION : convertFromRegions
8088 *
8089 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8090 *
8091 * PARAMETERS :
8092 *   @roi            : cam_area_t destination to be filled
8093 *   @frame_settings : capture request settings containing the region tag
8094 *   @tag            : metadata tag whose data is laid out as
8095 *                     [xmin, ymin, xmax, ymax, weight]
8096 *
8097 *==========================================================================*/
8098void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008099 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008100{
Thierry Strudel3d639192016-09-09 11:52:26 -07008101 int32_t x_min = frame_settings.find(tag).data.i32[0];
8102 int32_t y_min = frame_settings.find(tag).data.i32[1];
8103 int32_t x_max = frame_settings.find(tag).data.i32[2];
8104 int32_t y_max = frame_settings.find(tag).data.i32[3];
8105 roi.weight = frame_settings.find(tag).data.i32[4];
8106 roi.rect.left = x_min;
8107 roi.rect.top = y_min;
8108 roi.rect.width = x_max - x_min;
8109 roi.rect.height = y_max - y_min;
8110}
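/* Illustrative sketch (example numbers only): convertFromRegions() reads a
 * 5-element [xmin, ymin, xmax, ymax, weight] entry from the request settings
 * (e.g. the ANDROID_CONTROL_AE_REGIONS tag) and fills cam_area_t. For tag data
 * {100, 200, 400, 600, 1} the result is roi.rect = {left 100, top 200,
 * width 300, height 400} with roi.weight = 1.
 */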
8111
8112/*===========================================================================
8113 * FUNCTION : resetIfNeededROI
8114 *
8115 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8116 * crop region
8117 *
8118 * PARAMETERS :
8119 * @roi : cam_area_t struct to resize
8120 * @scalerCropRegion : cam_crop_region_t region to compare against
8121 *
8122 *
8123 *==========================================================================*/
8124bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8125 const cam_crop_region_t* scalerCropRegion)
8126{
8127 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8128 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8129 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8130 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8131
8132    /* As per the spec, weight = 0 indicates the roi should be disabled.
8133     * Without this check, the validation below (whether the roi lies inside the
8134     * scaler crop region) would fail, the roi would not be reset, and the
8135     * algorithm would keep using a stale roi window
8136 */
8137 if (roi->weight == 0) {
8138 return true;
8139 }
8140
8141 if ((roi_x_max < scalerCropRegion->left) ||
8142            // right edge of roi window is left of scaler crop's left edge
8143 (roi_y_max < scalerCropRegion->top) ||
8144            // bottom edge of roi window is above scaler crop's top edge
8145 (roi->rect.left > crop_x_max) ||
8146            // left edge of roi window is beyond (to the right of) scaler crop's right edge
8147 (roi->rect.top > crop_y_max)){
8148            // top edge of roi window is below scaler crop's bottom edge
8149 return false;
8150 }
8151 if (roi->rect.left < scalerCropRegion->left) {
8152 roi->rect.left = scalerCropRegion->left;
8153 }
8154 if (roi->rect.top < scalerCropRegion->top) {
8155 roi->rect.top = scalerCropRegion->top;
8156 }
8157 if (roi_x_max > crop_x_max) {
8158 roi_x_max = crop_x_max;
8159 }
8160 if (roi_y_max > crop_y_max) {
8161 roi_y_max = crop_y_max;
8162 }
8163 roi->rect.width = roi_x_max - roi->rect.left;
8164 roi->rect.height = roi_y_max - roi->rect.top;
8165 return true;
8166}
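/* Illustrative sketch (example numbers only): resetIfNeededROI() clips an roi
 * that partially overlaps the scaler crop region. With a crop region of
 * {left 0, top 0, width 2000, height 1500} and an roi of
 * {left 1800, top 1400, width 400, height 300, weight 1}, the roi is clipped to
 * {left 1800, top 1400, width 200, height 100} and the function returns true.
 * An roi with weight == 0 returns true immediately, and an roi lying entirely
 * outside the crop region returns false.
 */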
8167
8168/*===========================================================================
8169 * FUNCTION : convertLandmarks
8170 *
8171 * DESCRIPTION: helper method to extract the landmarks from face detection info
8172 *
8173 * PARAMETERS :
8174 * @landmark_data : input landmark data to be converted
8175 * @landmarks : int32_t destination array
8176 *
8177 *
8178 *==========================================================================*/
8179void QCamera3HardwareInterface::convertLandmarks(
8180 cam_face_landmarks_info_t landmark_data,
8181 int32_t *landmarks)
8182{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008183 if (landmark_data.is_left_eye_valid) {
8184 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8185 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8186 } else {
8187 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8188 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8189 }
8190
8191 if (landmark_data.is_right_eye_valid) {
8192 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8193 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8194 } else {
8195 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8196 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8197 }
8198
8199 if (landmark_data.is_mouth_valid) {
8200 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8201 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8202 } else {
8203 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8204 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8205 }
8206}
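/* Illustrative sketch (assuming the LEFT_EYE_X..MOUTH_Y indices run 0..5 in
 * that order): for a face where only the eyes are reported valid,
 * convertLandmarks() fills landmarks as
 * {left_eye.x, left_eye.y, right_eye.x, right_eye.y,
 *  FACE_INVALID_POINT, FACE_INVALID_POINT}.
 */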
8207
8208/*===========================================================================
8209 * FUNCTION : setInvalidLandmarks
8210 *
8211 * DESCRIPTION: helper method to set invalid landmarks
8212 *
8213 * PARAMETERS :
8214 * @landmarks : int32_t destination array
8215 *
8216 *
8217 *==========================================================================*/
8218void QCamera3HardwareInterface::setInvalidLandmarks(
8219 int32_t *landmarks)
8220{
8221 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8222 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8223 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8224 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8225 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8226 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008227}
8228
8229#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008230
8231/*===========================================================================
8232 * FUNCTION : getCapabilities
8233 *
8234 * DESCRIPTION: query camera capability from back-end
8235 *
8236 * PARAMETERS :
8237 * @ops : mm-interface ops structure
8238 * @cam_handle : camera handle for which we need capability
8239 *
8240 * RETURN : ptr type of capability structure
8241 * capability for success
8242 * NULL for failure
8243 *==========================================================================*/
8244cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8245 uint32_t cam_handle)
8246{
8247 int rc = NO_ERROR;
8248 QCamera3HeapMemory *capabilityHeap = NULL;
8249 cam_capability_t *cap_ptr = NULL;
8250
8251 if (ops == NULL) {
8252 LOGE("Invalid arguments");
8253 return NULL;
8254 }
8255
8256 capabilityHeap = new QCamera3HeapMemory(1);
8257 if (capabilityHeap == NULL) {
8258 LOGE("creation of capabilityHeap failed");
8259 return NULL;
8260 }
8261
8262 /* Allocate memory for capability buffer */
8263 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8264 if(rc != OK) {
8265 LOGE("No memory for cappability");
8266 goto allocate_failed;
8267 }
8268
8269 /* Map memory for capability buffer */
8270 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8271
8272 rc = ops->map_buf(cam_handle,
8273 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8274 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8275 if(rc < 0) {
8276 LOGE("failed to map capability buffer");
8277 rc = FAILED_TRANSACTION;
8278 goto map_failed;
8279 }
8280
8281 /* Query Capability */
8282 rc = ops->query_capability(cam_handle);
8283 if(rc < 0) {
8284 LOGE("failed to query capability");
8285 rc = FAILED_TRANSACTION;
8286 goto query_failed;
8287 }
8288
8289 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8290 if (cap_ptr == NULL) {
8291 LOGE("out of memory");
8292 rc = NO_MEMORY;
8293 goto query_failed;
8294 }
8295
8296 memset(cap_ptr, 0, sizeof(cam_capability_t));
8297 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8298
8299 int index;
8300 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8301 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8302 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8303 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8304 }
8305
8306query_failed:
8307 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8308map_failed:
8309 capabilityHeap->deallocate();
8310allocate_failed:
8311 delete capabilityHeap;
8312
8313 if (rc != NO_ERROR) {
8314 return NULL;
8315 } else {
8316 return cap_ptr;
8317 }
8318}
8319
Thierry Strudel3d639192016-09-09 11:52:26 -07008320/*===========================================================================
8321 * FUNCTION : initCapabilities
8322 *
8323 * DESCRIPTION: initialize camera capabilities in static data struct
8324 *
8325 * PARAMETERS :
8326 * @cameraId : camera Id
8327 *
8328 * RETURN : int32_t type of status
8329 * NO_ERROR -- success
8330 * none-zero failure code
8331 *==========================================================================*/
8332int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8333{
8334 int rc = 0;
8335 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008336 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008337
8338 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8339 if (rc) {
8340 LOGE("camera_open failed. rc = %d", rc);
8341 goto open_failed;
8342 }
8343 if (!cameraHandle) {
8344 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8345 goto open_failed;
8346 }
8347
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008348 handle = get_main_camera_handle(cameraHandle->camera_handle);
8349 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8350 if (gCamCapability[cameraId] == NULL) {
8351 rc = FAILED_TRANSACTION;
8352 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008353 }
8354
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008355 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008356 if (is_dual_camera_by_idx(cameraId)) {
8357 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8358 gCamCapability[cameraId]->aux_cam_cap =
8359 getCapabilities(cameraHandle->ops, handle);
8360 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8361 rc = FAILED_TRANSACTION;
8362 free(gCamCapability[cameraId]);
8363 goto failed_op;
8364 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008365
8366 // Copy the main camera capability to main_cam_cap struct
8367 gCamCapability[cameraId]->main_cam_cap =
8368 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8369 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8370 LOGE("out of memory");
8371 rc = NO_MEMORY;
8372 goto failed_op;
8373 }
8374 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8375 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008376 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008377failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8379 cameraHandle = NULL;
8380open_failed:
8381 return rc;
8382}
8383
8384/*==========================================================================
8385 * FUNCTION : get3AVersion
8386 *
8387 * DESCRIPTION: get the Q3A S/W version
8388 *
8389 * PARAMETERS :
8390 * @sw_version: Reference of Q3A structure which will hold version info upon
8391 * return
8392 *
8393 * RETURN : None
8394 *
8395 *==========================================================================*/
8396void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8397{
8398 if(gCamCapability[mCameraId])
8399 sw_version = gCamCapability[mCameraId]->q3a_version;
8400 else
8401 LOGE("Capability structure NULL!");
8402}
8403
8404
8405/*===========================================================================
8406 * FUNCTION : initParameters
8407 *
8408 * DESCRIPTION: initialize camera parameters
8409 *
8410 * PARAMETERS :
8411 *
8412 * RETURN : int32_t type of status
8413 * NO_ERROR -- success
8414 * none-zero failure code
8415 *==========================================================================*/
8416int QCamera3HardwareInterface::initParameters()
8417{
8418 int rc = 0;
8419
8420 //Allocate Set Param Buffer
8421 mParamHeap = new QCamera3HeapMemory(1);
8422 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8423 if(rc != OK) {
8424 rc = NO_MEMORY;
8425 LOGE("Failed to allocate SETPARM Heap memory");
8426 delete mParamHeap;
8427 mParamHeap = NULL;
8428 return rc;
8429 }
8430
8431 //Map memory for parameters buffer
8432 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8433 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8434 mParamHeap->getFd(0),
8435 sizeof(metadata_buffer_t),
8436 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8437 if(rc < 0) {
8438 LOGE("failed to map SETPARM buffer");
8439 rc = FAILED_TRANSACTION;
8440 mParamHeap->deallocate();
8441 delete mParamHeap;
8442 mParamHeap = NULL;
8443 return rc;
8444 }
8445
8446 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8447
8448 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8449 return rc;
8450}
8451
8452/*===========================================================================
8453 * FUNCTION : deinitParameters
8454 *
8455 * DESCRIPTION: de-initialize camera parameters
8456 *
8457 * PARAMETERS :
8458 *
8459 * RETURN : NONE
8460 *==========================================================================*/
8461void QCamera3HardwareInterface::deinitParameters()
8462{
8463 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8464 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8465
8466 mParamHeap->deallocate();
8467 delete mParamHeap;
8468 mParamHeap = NULL;
8469
8470 mParameters = NULL;
8471
8472 free(mPrevParameters);
8473 mPrevParameters = NULL;
8474}
8475
8476/*===========================================================================
8477 * FUNCTION : calcMaxJpegSize
8478 *
8479 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8480 *
8481 * PARAMETERS :
8482 *   @camera_id : camera Id
8483 * RETURN : max_jpeg_size
8484 *==========================================================================*/
8485size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8486{
8487 size_t max_jpeg_size = 0;
8488 size_t temp_width, temp_height;
8489 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8490 MAX_SIZES_CNT);
8491 for (size_t i = 0; i < count; i++) {
8492 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8493 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8494 if (temp_width * temp_height > max_jpeg_size ) {
8495 max_jpeg_size = temp_width * temp_height;
8496 }
8497 }
8498 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8499 return max_jpeg_size;
8500}
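/* Worked example (assumed 13MP sensor with a 4160x3120 picture size): the
 * largest pixel count is 4160 * 3120 = 12,979,200, so
 * max_jpeg_size = 12,979,200 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *               = 19,468,800 bytes plus the blob header.
 */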
8501
8502/*===========================================================================
8503 * FUNCTION : getMaxRawSize
8504 *
8505 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8506 *
8507 * PARAMETERS :
8508 *   @camera_id : camera Id
8509 * RETURN : Largest supported Raw Dimension
8510 *==========================================================================*/
8511cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8512{
8513 int max_width = 0;
8514 cam_dimension_t maxRawSize;
8515
8516 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8517 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8518 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8519 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8520 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8521 }
8522 }
8523 return maxRawSize;
8524}
8525
8526
8527/*===========================================================================
8528 * FUNCTION : calcMaxJpegDim
8529 *
8530 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8531 *
8532 * PARAMETERS :
8533 *
8534 * RETURN : max_jpeg_dim
8535 *==========================================================================*/
8536cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8537{
8538 cam_dimension_t max_jpeg_dim;
8539 cam_dimension_t curr_jpeg_dim;
8540 max_jpeg_dim.width = 0;
8541 max_jpeg_dim.height = 0;
8542 curr_jpeg_dim.width = 0;
8543 curr_jpeg_dim.height = 0;
8544 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8545 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8546 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8547 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8548 max_jpeg_dim.width * max_jpeg_dim.height ) {
8549 max_jpeg_dim.width = curr_jpeg_dim.width;
8550 max_jpeg_dim.height = curr_jpeg_dim.height;
8551 }
8552 }
8553 return max_jpeg_dim;
8554}
8555
8556/*===========================================================================
8557 * FUNCTION : addStreamConfig
8558 *
8559 * DESCRIPTION: adds the stream configuration to the array
8560 *
8561 * PARAMETERS :
8562 * @available_stream_configs : pointer to stream configuration array
8563 * @scalar_format : scalar format
8564 * @dim : configuration dimension
8565 * @config_type : input or output configuration type
8566 *
8567 * RETURN : NONE
8568 *==========================================================================*/
8569void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8570 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8571{
8572 available_stream_configs.add(scalar_format);
8573 available_stream_configs.add(dim.width);
8574 available_stream_configs.add(dim.height);
8575 available_stream_configs.add(config_type);
8576}
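/* Illustrative sketch (example values): each call to addStreamConfig() appends
 * one (format, width, height, direction) 4-tuple to the flat configuration
 * vector, e.g.
 *
 *   cam_dimension_t dim;
 *   dim.width = 4160; dim.height = 3120;
 *   addStreamConfig(configs, ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   // appends: BLOB, 4160, 3120, OUTPUT
 */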
8577
8578/*===========================================================================
8579 * FUNCTION : suppportBurstCapture
8580 *
8581 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8582 *
8583 * PARAMETERS :
8584 * @cameraId : camera Id
8585 *
8586 * RETURN : true if camera supports BURST_CAPTURE
8587 * false otherwise
8588 *==========================================================================*/
8589bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8590{
8591 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8592 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8593 const int32_t highResWidth = 3264;
8594 const int32_t highResHeight = 2448;
8595
8596 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8597 // Maximum resolution images cannot be captured at >= 10fps
8598 // -> not supporting BURST_CAPTURE
8599 return false;
8600 }
8601
8602 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8603 // Maximum resolution images can be captured at >= 20fps
8604 // --> supporting BURST_CAPTURE
8605 return true;
8606 }
8607
8608 // Find the smallest highRes resolution, or largest resolution if there is none
8609 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8610 MAX_SIZES_CNT);
8611 size_t highRes = 0;
8612 while ((highRes + 1 < totalCnt) &&
8613 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8614 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8615 highResWidth * highResHeight)) {
8616 highRes++;
8617 }
8618 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8619 return true;
8620 } else {
8621 return false;
8622 }
8623}
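/* Worked example (made-up capability values): with picture_min_duration[0] of
 * 40ms the largest size already sustains >= 20fps, so BURST_CAPTURE is
 * reported. With 80ms it falls through to the smallest entry of at least
 * 3264x2448 (~8MP) and BURST_CAPTURE is reported only if that entry's min
 * duration is <= 50ms (i.e. >= 20fps).
 */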
8624
8625/*===========================================================================
8626 * FUNCTION : initStaticMetadata
8627 *
8628 * DESCRIPTION: initialize the static metadata
8629 *
8630 * PARAMETERS :
8631 * @cameraId : camera Id
8632 *
8633 * RETURN : int32_t type of status
8634 * 0 -- success
8635 * non-zero failure code
8636 *==========================================================================*/
8637int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8638{
8639 int rc = 0;
8640 CameraMetadata staticInfo;
8641 size_t count = 0;
8642 bool limitedDevice = false;
8643 char prop[PROPERTY_VALUE_MAX];
8644 bool supportBurst = false;
8645
8646 supportBurst = supportBurstCapture(cameraId);
8647
8648 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8649     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
8650     * advertised as a limited device */
8651 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8652 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8653 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8654 !supportBurst;
8655
8656 uint8_t supportedHwLvl = limitedDevice ?
8657 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008658#ifndef USE_HAL_3_3
8659 // LEVEL_3 - This device will support level 3.
8660 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8661#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008662 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008663#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008664
8665 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8666 &supportedHwLvl, 1);
8667
8668 bool facingBack = false;
8669 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8670 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8671 facingBack = true;
8672 }
8673 /*HAL 3 only*/
8674 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8675 &gCamCapability[cameraId]->min_focus_distance, 1);
8676
8677 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8678 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8679
8680 /*should be using focal lengths but sensor doesn't provide that info now*/
8681 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8682 &gCamCapability[cameraId]->focal_length,
8683 1);
8684
8685 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8686 gCamCapability[cameraId]->apertures,
8687 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8688
8689 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8690 gCamCapability[cameraId]->filter_densities,
8691 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8692
8693
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008694 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8695 size_t mode_count =
8696 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8697 for (size_t i = 0; i < mode_count; i++) {
8698 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008700 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008701 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008702
8703 int32_t lens_shading_map_size[] = {
8704 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8705 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8706 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8707 lens_shading_map_size,
8708 sizeof(lens_shading_map_size)/sizeof(int32_t));
8709
8710 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8711 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8712
8713 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8714 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8715
8716 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8717 &gCamCapability[cameraId]->max_frame_duration, 1);
8718
8719 camera_metadata_rational baseGainFactor = {
8720 gCamCapability[cameraId]->base_gain_factor.numerator,
8721 gCamCapability[cameraId]->base_gain_factor.denominator};
8722 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8723 &baseGainFactor, 1);
8724
8725 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8726 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8727
8728 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8729 gCamCapability[cameraId]->pixel_array_size.height};
8730 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8731 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8732
8733 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8734 gCamCapability[cameraId]->active_array_size.top,
8735 gCamCapability[cameraId]->active_array_size.width,
8736 gCamCapability[cameraId]->active_array_size.height};
8737 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8738 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8739
8740 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8741 &gCamCapability[cameraId]->white_level, 1);
8742
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008743 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8744 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8745 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008746 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008747 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008748
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008749#ifndef USE_HAL_3_3
8750 bool hasBlackRegions = false;
8751 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8752 LOGW("black_region_count: %d is bounded to %d",
8753 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8754 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8755 }
8756 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8757 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8758 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8759 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8760 }
8761 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8762 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8763 hasBlackRegions = true;
8764 }
8765#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008766 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8767 &gCamCapability[cameraId]->flash_charge_duration, 1);
8768
8769 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8770 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8771
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008772 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8773 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8774 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8776 &timestampSource, 1);
8777
Thierry Strudel54dc9782017-02-15 12:12:10 -08008778 //update histogram vendor data
8779 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008780 &gCamCapability[cameraId]->histogram_size, 1);
8781
Thierry Strudel54dc9782017-02-15 12:12:10 -08008782 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008783 &gCamCapability[cameraId]->max_histogram_count, 1);
8784
Shuzhen Wang14415f52016-11-16 18:26:18 -08008785 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
8786 //so that app can request fewer number of bins than the maximum supported.
8787 std::vector<int32_t> histBins;
8788 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
8789 histBins.push_back(maxHistBins);
8790 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
8791 (maxHistBins & 0x1) == 0) {
8792 histBins.push_back(maxHistBins >> 1);
8793 maxHistBins >>= 1;
8794 }
8795 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
8796 histBins.data(), histBins.size());
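    /* Illustrative sketch (assuming max_histogram_count = 256 and
     * MIN_CAM_HISTOGRAM_STATS_SIZE = 32): the supported-bins list advertised
     * above would be {256, 128, 64, 32}, i.e. the maximum bin count halved
     * repeatedly while it remains even and at or above the minimum size.
     */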
8797
Thierry Strudel3d639192016-09-09 11:52:26 -07008798 int32_t sharpness_map_size[] = {
8799 gCamCapability[cameraId]->sharpness_map_size.width,
8800 gCamCapability[cameraId]->sharpness_map_size.height};
8801
8802 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8803 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8804
8805 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8806 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8807
8808 int32_t scalar_formats[] = {
8809 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8810 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8811 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8812 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8813 HAL_PIXEL_FORMAT_RAW10,
8814 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8815 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8816 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8817 scalar_formats,
8818 scalar_formats_count);
8819
8820 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8821 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8822 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8823 count, MAX_SIZES_CNT, available_processed_sizes);
8824 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8825 available_processed_sizes, count * 2);
8826
8827 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8828 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8829 makeTable(gCamCapability[cameraId]->raw_dim,
8830 count, MAX_SIZES_CNT, available_raw_sizes);
8831 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8832 available_raw_sizes, count * 2);
8833
8834 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8835 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8836 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8837 count, MAX_SIZES_CNT, available_fps_ranges);
8838 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8839 available_fps_ranges, count * 2);
8840
8841 camera_metadata_rational exposureCompensationStep = {
8842 gCamCapability[cameraId]->exp_compensation_step.numerator,
8843 gCamCapability[cameraId]->exp_compensation_step.denominator};
8844 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8845 &exposureCompensationStep, 1);
8846
8847 Vector<uint8_t> availableVstabModes;
8848 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8849 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008850 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008851 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008852 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008853 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008854 count = IS_TYPE_MAX;
8855 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8856 for (size_t i = 0; i < count; i++) {
8857 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8858 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8859 eisSupported = true;
8860 break;
8861 }
8862 }
8863 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008864 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8865 }
8866 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8867 availableVstabModes.array(), availableVstabModes.size());
8868
8869 /*HAL 1 and HAL 3 common*/
8870 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8871 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8872 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8873 float maxZoom = maxZoomStep/minZoomStep;
8874 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8875 &maxZoom, 1);
8876
8877 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8878 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8879
8880 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8881 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8882 max3aRegions[2] = 0; /* AF not supported */
8883 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8884 max3aRegions, 3);
8885
8886 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8887 memset(prop, 0, sizeof(prop));
8888 property_get("persist.camera.facedetect", prop, "1");
8889 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8890 LOGD("Support face detection mode: %d",
8891 supportedFaceDetectMode);
8892
8893 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008894 /* support mode should be OFF if max number of face is 0 */
8895 if (maxFaces <= 0) {
8896 supportedFaceDetectMode = 0;
8897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008898 Vector<uint8_t> availableFaceDetectModes;
8899 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8900 if (supportedFaceDetectMode == 1) {
8901 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8902 } else if (supportedFaceDetectMode == 2) {
8903 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8904 } else if (supportedFaceDetectMode == 3) {
8905 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8906 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8907 } else {
8908 maxFaces = 0;
8909 }
8910 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8911 availableFaceDetectModes.array(),
8912 availableFaceDetectModes.size());
8913 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8914 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008915 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8916 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8917 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008918
Emilian Peev7650c122017-01-19 08:24:33 -08008919#ifdef SUPPORT_DEPTH_DATA
Emilian Peev0ce959f2017-03-07 16:49:49 +00008920 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8921 //TODO: Update depth size accordingly, currently we use active array
8922 // as reference.
8923 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8924 int32_t depthHeight =
8925 gCamCapability[cameraId]->active_array_size.height;
8926 //As per spec. depth cloud should be sample count / 16
8927 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8928 assert(0 < depthSamplesCount);
8929 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
8930 &depthSamplesCount, 1);
Emilian Peev7650c122017-01-19 08:24:33 -08008931
Emilian Peev0ce959f2017-03-07 16:49:49 +00008932 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8933 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8934 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8935 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008936
Emilian Peev0ce959f2017-03-07 16:49:49 +00008937 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8938 1, 1 };
8939 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8940 depthMinDuration,
8941 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008942
Emilian Peev0ce959f2017-03-07 16:49:49 +00008943 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB,
8944 depthSamplesCount, 1, 0 };
8945 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8946 depthStallDuration,
8947 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
Emilian Peev7650c122017-01-19 08:24:33 -08008948
Emilian Peev0ce959f2017-03-07 16:49:49 +00008949 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8950 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8951 }
Emilian Peev7650c122017-01-19 08:24:33 -08008952#endif
8953
Thierry Strudel3d639192016-09-09 11:52:26 -07008954 int32_t exposureCompensationRange[] = {
8955 gCamCapability[cameraId]->exposure_compensation_min,
8956 gCamCapability[cameraId]->exposure_compensation_max};
8957 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8958 exposureCompensationRange,
8959 sizeof(exposureCompensationRange)/sizeof(int32_t));
8960
8961 uint8_t lensFacing = (facingBack) ?
8962 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8963 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8964
8965 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8966 available_thumbnail_sizes,
8967 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8968
8969 /*all sizes will be clubbed into this tag*/
8970 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8971 /*android.scaler.availableStreamConfigurations*/
8972 Vector<int32_t> available_stream_configs;
8973 cam_dimension_t active_array_dim;
8974 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8975 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08008976
8977    /* Advertise the list of supported input dimensions based on the property below.
8978       By default, all sizes up to 5MP will be advertised.
8979 Note that the setprop resolution format should be WxH.
8980 e.g: adb shell setprop persist.camera.input.minsize 1280x720
8981 To list all supported sizes, setprop needs to be set with "0x0" */
8982 cam_dimension_t minInputSize = {2592,1944}; //5MP
8983 memset(prop, 0, sizeof(prop));
8984 property_get("persist.camera.input.minsize", prop, "2592x1944");
8985 if (strlen(prop) > 0) {
8986 char *saveptr = NULL;
8987 char *token = strtok_r(prop, "x", &saveptr);
8988 if (token != NULL) {
8989 minInputSize.width = atoi(token);
8990 }
8991 token = strtok_r(NULL, "x", &saveptr);
8992 if (token != NULL) {
8993 minInputSize.height = atoi(token);
8994 }
8995 }
8996
Thierry Strudel3d639192016-09-09 11:52:26 -07008997 /* Add input/output stream configurations for each scalar formats*/
8998 for (size_t j = 0; j < scalar_formats_count; j++) {
8999 switch (scalar_formats[j]) {
9000 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9001 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9002 case HAL_PIXEL_FORMAT_RAW10:
9003 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9004 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9005 addStreamConfig(available_stream_configs, scalar_formats[j],
9006 gCamCapability[cameraId]->raw_dim[i],
9007 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9008 }
9009 break;
9010 case HAL_PIXEL_FORMAT_BLOB:
9011 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9012 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9013 addStreamConfig(available_stream_configs, scalar_formats[j],
9014 gCamCapability[cameraId]->picture_sizes_tbl[i],
9015 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9016 }
9017 break;
9018 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9019 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9020 default:
9021 cam_dimension_t largest_picture_size;
9022 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9023 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9024 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9025 addStreamConfig(available_stream_configs, scalar_formats[j],
9026 gCamCapability[cameraId]->picture_sizes_tbl[i],
9027 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009028                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
9029 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9030 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9031 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9032 >= minInputSize.width) || (gCamCapability[cameraId]->
9033 picture_sizes_tbl[i].height >= minInputSize.height)) {
9034 addStreamConfig(available_stream_configs, scalar_formats[j],
9035 gCamCapability[cameraId]->picture_sizes_tbl[i],
9036 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9037 }
9038 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009039 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009040
Thierry Strudel3d639192016-09-09 11:52:26 -07009041 break;
9042 }
9043 }
9044
9045 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9046 available_stream_configs.array(), available_stream_configs.size());
9047 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9048 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9049
9050 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9051 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9052
9053 /* android.scaler.availableMinFrameDurations */
9054 Vector<int64_t> available_min_durations;
9055 for (size_t j = 0; j < scalar_formats_count; j++) {
9056 switch (scalar_formats[j]) {
9057 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9058 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9059 case HAL_PIXEL_FORMAT_RAW10:
9060 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9061 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9062 available_min_durations.add(scalar_formats[j]);
9063 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9064 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9065 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9066 }
9067 break;
9068 default:
9069 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9070 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9071 available_min_durations.add(scalar_formats[j]);
9072 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9073 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9074 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9075 }
9076 break;
9077 }
9078 }
9079 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9080 available_min_durations.array(), available_min_durations.size());
9081
9082 Vector<int32_t> available_hfr_configs;
9083 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9084 int32_t fps = 0;
9085 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9086 case CAM_HFR_MODE_60FPS:
9087 fps = 60;
9088 break;
9089 case CAM_HFR_MODE_90FPS:
9090 fps = 90;
9091 break;
9092 case CAM_HFR_MODE_120FPS:
9093 fps = 120;
9094 break;
9095 case CAM_HFR_MODE_150FPS:
9096 fps = 150;
9097 break;
9098 case CAM_HFR_MODE_180FPS:
9099 fps = 180;
9100 break;
9101 case CAM_HFR_MODE_210FPS:
9102 fps = 210;
9103 break;
9104 case CAM_HFR_MODE_240FPS:
9105 fps = 240;
9106 break;
9107 case CAM_HFR_MODE_480FPS:
9108 fps = 480;
9109 break;
9110 case CAM_HFR_MODE_OFF:
9111 case CAM_HFR_MODE_MAX:
9112 default:
9113 break;
9114 }
9115
9116 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9117 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9118 /* For each HFR frame rate, need to advertise one variable fps range
9119 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9120 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9121 * set by the app. When video recording is started, [120, 120] is
9122 * set. This way sensor configuration does not change when recording
9123 * is started */
9124
9125 /* (width, height, fps_min, fps_max, batch_size_max) */
9126 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9127 j < MAX_SIZES_CNT; j++) {
9128 available_hfr_configs.add(
9129 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9130 available_hfr_configs.add(
9131 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9132 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9133 available_hfr_configs.add(fps);
9134 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9135
9136 /* (width, height, fps_min, fps_max, batch_size_max) */
9137 available_hfr_configs.add(
9138 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9139 available_hfr_configs.add(
9140 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9141 available_hfr_configs.add(fps);
9142 available_hfr_configs.add(fps);
9143 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9144 }
9145 }
9146 }
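    /* Illustrative sketch (assuming PREVIEW_FPS_FOR_HFR = 30): for a 120fps HFR
     * entry at 1920x1080 the loop above adds two 5-tuples to
     * available_hfr_configs:
     *   (1920, 1080,  30, 120, 4)   // variable range while camcorder preview runs
     *   (1920, 1080, 120, 120, 4)   // fixed range once recording starts
     * where 4 = 120 / PREVIEW_FPS_FOR_HFR is the maximum batch size.
     */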
9147 //Advertise HFR capability only if the property is set
9148 memset(prop, 0, sizeof(prop));
9149 property_get("persist.camera.hal3hfr.enable", prop, "1");
9150 uint8_t hfrEnable = (uint8_t)atoi(prop);
9151
9152 if(hfrEnable && available_hfr_configs.array()) {
9153 staticInfo.update(
9154 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9155 available_hfr_configs.array(), available_hfr_configs.size());
9156 }
9157
9158 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9159 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9160 &max_jpeg_size, 1);
9161
9162 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9163 size_t size = 0;
9164 count = CAM_EFFECT_MODE_MAX;
9165 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9166 for (size_t i = 0; i < count; i++) {
9167 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9168 gCamCapability[cameraId]->supported_effects[i]);
9169 if (NAME_NOT_FOUND != val) {
9170 avail_effects[size] = (uint8_t)val;
9171 size++;
9172 }
9173 }
9174 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9175 avail_effects,
9176 size);
9177
9178 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9179 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9180 size_t supported_scene_modes_cnt = 0;
9181 count = CAM_SCENE_MODE_MAX;
9182 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9183 for (size_t i = 0; i < count; i++) {
9184 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9185 CAM_SCENE_MODE_OFF) {
9186 int val = lookupFwkName(SCENE_MODES_MAP,
9187 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9188 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009189
Thierry Strudel3d639192016-09-09 11:52:26 -07009190 if (NAME_NOT_FOUND != val) {
9191 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9192 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9193 supported_scene_modes_cnt++;
9194 }
9195 }
9196 }
9197 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9198 avail_scene_modes,
9199 supported_scene_modes_cnt);
9200
9201 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9202 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9203 supported_scene_modes_cnt,
9204 CAM_SCENE_MODE_MAX,
9205 scene_mode_overrides,
9206 supported_indexes,
9207 cameraId);
9208
9209 if (supported_scene_modes_cnt == 0) {
9210 supported_scene_modes_cnt = 1;
9211 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9212 }
9213
9214 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9215 scene_mode_overrides, supported_scene_modes_cnt * 3);
9216
9217 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9218 ANDROID_CONTROL_MODE_AUTO,
9219 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9220 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9221 available_control_modes,
9222 3);
9223
9224 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9225 size = 0;
9226 count = CAM_ANTIBANDING_MODE_MAX;
9227 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9228 for (size_t i = 0; i < count; i++) {
9229 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9230 gCamCapability[cameraId]->supported_antibandings[i]);
9231 if (NAME_NOT_FOUND != val) {
9232 avail_antibanding_modes[size] = (uint8_t)val;
9233 size++;
9234 }
9235
9236 }
9237 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9238 avail_antibanding_modes,
9239 size);
9240
9241 uint8_t avail_abberation_modes[] = {
9242 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9243 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9244 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9245 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9246 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9247 if (0 == count) {
9248        // If no aberration correction modes are available for a device, advertise only the OFF mode
9249 size = 1;
9250 } else {
9251        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9252        // so advertise all 3 modes if at least one mode is supported, as per the
9253        // new M requirement
9254 size = 3;
9255 }
9256 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9257 avail_abberation_modes,
9258 size);
9259
9260 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9261 size = 0;
9262 count = CAM_FOCUS_MODE_MAX;
9263 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9264 for (size_t i = 0; i < count; i++) {
9265 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9266 gCamCapability[cameraId]->supported_focus_modes[i]);
9267 if (NAME_NOT_FOUND != val) {
9268 avail_af_modes[size] = (uint8_t)val;
9269 size++;
9270 }
9271 }
9272 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9273 avail_af_modes,
9274 size);
9275
9276 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9277 size = 0;
9278 count = CAM_WB_MODE_MAX;
9279 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9280 for (size_t i = 0; i < count; i++) {
9281 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9282 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9283 gCamCapability[cameraId]->supported_white_balances[i]);
9284 if (NAME_NOT_FOUND != val) {
9285 avail_awb_modes[size] = (uint8_t)val;
9286 size++;
9287 }
9288 }
9289 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9290 avail_awb_modes,
9291 size);
9292
9293 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9294 count = CAM_FLASH_FIRING_LEVEL_MAX;
9295 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9296 count);
9297 for (size_t i = 0; i < count; i++) {
9298 available_flash_levels[i] =
9299 gCamCapability[cameraId]->supported_firing_levels[i];
9300 }
9301 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9302 available_flash_levels, count);
9303
9304 uint8_t flashAvailable;
9305 if (gCamCapability[cameraId]->flash_available)
9306 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9307 else
9308 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9309 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9310 &flashAvailable, 1);
9311
9312 Vector<uint8_t> avail_ae_modes;
9313 count = CAM_AE_MODE_MAX;
9314 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9315 for (size_t i = 0; i < count; i++) {
9316 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9317 }
9318 if (flashAvailable) {
9319 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9320 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009321 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009322 }
9323 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9324 avail_ae_modes.array(),
9325 avail_ae_modes.size());
9326
9327 int32_t sensitivity_range[2];
9328 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9329 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9330 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9331 sensitivity_range,
9332 sizeof(sensitivity_range) / sizeof(int32_t));
9333
9334 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9335 &gCamCapability[cameraId]->max_analog_sensitivity,
9336 1);
9337
9338 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9339 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9340 &sensor_orientation,
9341 1);
9342
9343 int32_t max_output_streams[] = {
9344 MAX_STALLING_STREAMS,
9345 MAX_PROCESSED_STREAMS,
9346 MAX_RAW_STREAMS};
9347 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9348 max_output_streams,
9349 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9350
9351 uint8_t avail_leds = 0;
9352 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9353 &avail_leds, 0);
9354
9355 uint8_t focus_dist_calibrated;
9356 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9357 gCamCapability[cameraId]->focus_dist_calibrated);
9358 if (NAME_NOT_FOUND != val) {
9359 focus_dist_calibrated = (uint8_t)val;
9360 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9361 &focus_dist_calibrated, 1);
9362 }
9363
9364 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9365 size = 0;
9366 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9367 MAX_TEST_PATTERN_CNT);
9368 for (size_t i = 0; i < count; i++) {
9369 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9370 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9371 if (NAME_NOT_FOUND != testpatternMode) {
9372 avail_testpattern_modes[size] = testpatternMode;
9373 size++;
9374 }
9375 }
9376 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9377 avail_testpattern_modes,
9378 size);
9379
9380 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9381 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9382 &max_pipeline_depth,
9383 1);
9384
9385 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9386 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9387 &partial_result_count,
9388 1);
9389
9390 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9391 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9392
9393 Vector<uint8_t> available_capabilities;
9394 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9395 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9396 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9397 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9398 if (supportBurst) {
9399 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9400 }
9401 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9402 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9403 if (hfrEnable && available_hfr_configs.array()) {
9404 available_capabilities.add(
9405 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9406 }
9407
9408 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9409 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9410 }
9411 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9412 available_capabilities.array(),
9413 available_capabilities.size());
9414
9415     //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9416     //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9417 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9418 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9419
9420 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9421 &aeLockAvailable, 1);
9422
9423     //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9424     //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9425 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9426 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9427
9428 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9429 &awbLockAvailable, 1);
9430
9431 int32_t max_input_streams = 1;
9432 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9433 &max_input_streams,
9434 1);
9435
9436 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9437 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9438 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9439 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9440 HAL_PIXEL_FORMAT_YCbCr_420_888};
9441 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9442 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
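    // Reading the map above (illustration of the literal array, not an extra contract):
    // IMPLEMENTATION_DEFINED input can be reprocessed to BLOB or YCbCr_420_888, and
    // YCbCr_420_888 input can be reprocessed to BLOB or YCbCr_420_888; each input
    // format is followed by its output count and then that many output formats.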
9443
9444 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9445 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9446 &max_latency,
9447 1);
9448
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009449#ifndef USE_HAL_3_3
9450 int32_t isp_sensitivity_range[2];
9451 isp_sensitivity_range[0] =
9452 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9453 isp_sensitivity_range[1] =
9454 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9455 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9456 isp_sensitivity_range,
9457 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9458#endif
9459
Thierry Strudel3d639192016-09-09 11:52:26 -07009460 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9461 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9462 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9463 available_hot_pixel_modes,
9464 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9465
9466 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9467 ANDROID_SHADING_MODE_FAST,
9468 ANDROID_SHADING_MODE_HIGH_QUALITY};
9469 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9470 available_shading_modes,
9471 3);
9472
9473 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9474 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9475 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9476 available_lens_shading_map_modes,
9477 2);
9478
9479 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9480 ANDROID_EDGE_MODE_FAST,
9481 ANDROID_EDGE_MODE_HIGH_QUALITY,
9482 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9483 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9484 available_edge_modes,
9485 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9486
9487 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9488 ANDROID_NOISE_REDUCTION_MODE_FAST,
9489 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9490 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9491 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9492 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9493 available_noise_red_modes,
9494 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9495
9496 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9497 ANDROID_TONEMAP_MODE_FAST,
9498 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9499 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9500 available_tonemap_modes,
9501 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9502
9503 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9504 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9505 available_hot_pixel_map_modes,
9506 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9507
9508 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9509 gCamCapability[cameraId]->reference_illuminant1);
9510 if (NAME_NOT_FOUND != val) {
9511 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9512 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9513 }
9514
9515 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9516 gCamCapability[cameraId]->reference_illuminant2);
9517 if (NAME_NOT_FOUND != val) {
9518 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9519 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9520 }
9521
9522 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9523 (void *)gCamCapability[cameraId]->forward_matrix1,
9524 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9525
9526 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9527 (void *)gCamCapability[cameraId]->forward_matrix2,
9528 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9529
9530 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9531 (void *)gCamCapability[cameraId]->color_transform1,
9532 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9533
9534 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9535 (void *)gCamCapability[cameraId]->color_transform2,
9536 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9537
9538 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9539 (void *)gCamCapability[cameraId]->calibration_transform1,
9540 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9541
9542 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9543 (void *)gCamCapability[cameraId]->calibration_transform2,
9544 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9545
9546 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9547 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9548 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9549 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9550 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9551 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9552 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9553 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9554 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9555 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9556 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9557 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9558 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9559 ANDROID_JPEG_GPS_COORDINATES,
9560 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9561 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9562 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9563 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9564 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9565 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9566 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9567 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9568 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9569 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009570#ifndef USE_HAL_3_3
9571 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9572#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009573 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009574 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009575 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9576 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009577 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009578 /* DevCamDebug metadata request_keys_basic */
9579 DEVCAMDEBUG_META_ENABLE,
9580 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009581 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9582 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009583 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009584
9585 size_t request_keys_cnt =
9586 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9587 Vector<int32_t> available_request_keys;
9588 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9589 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9590 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9591 }
9592
9593 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9594 available_request_keys.array(), available_request_keys.size());
9595
9596 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9597 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9598 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9599 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9600 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9601 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9602 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9603 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9604 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9605 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9606 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9607 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9608 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9609 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9610 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9611 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9612 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009613 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009614 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9615 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9616 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009617 ANDROID_STATISTICS_FACE_SCORES,
9618#ifndef USE_HAL_3_3
9619 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9620#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009621 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009622 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009623 // DevCamDebug metadata result_keys_basic
9624 DEVCAMDEBUG_META_ENABLE,
9625 // DevCamDebug metadata result_keys AF
9626 DEVCAMDEBUG_AF_LENS_POSITION,
9627 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9628 DEVCAMDEBUG_AF_TOF_DISTANCE,
9629 DEVCAMDEBUG_AF_LUMA,
9630 DEVCAMDEBUG_AF_HAF_STATE,
9631 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9632 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9633 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9634 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9635 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9636 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9637 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9638 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9639 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9640 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9641 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9642 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9643 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9644 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9645 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9646 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9647 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9648 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9649 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9650 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9651 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9652 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9653 // DevCamDebug metadata result_keys AEC
9654 DEVCAMDEBUG_AEC_TARGET_LUMA,
9655 DEVCAMDEBUG_AEC_COMP_LUMA,
9656 DEVCAMDEBUG_AEC_AVG_LUMA,
9657 DEVCAMDEBUG_AEC_CUR_LUMA,
9658 DEVCAMDEBUG_AEC_LINECOUNT,
9659 DEVCAMDEBUG_AEC_REAL_GAIN,
9660 DEVCAMDEBUG_AEC_EXP_INDEX,
9661 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009662 // DevCamDebug metadata result_keys zzHDR
9663 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9664 DEVCAMDEBUG_AEC_L_LINECOUNT,
9665 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9666 DEVCAMDEBUG_AEC_S_LINECOUNT,
9667 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9668 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9669 // DevCamDebug metadata result_keys ADRC
9670 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9671 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9672 DEVCAMDEBUG_AEC_GTM_RATIO,
9673 DEVCAMDEBUG_AEC_LTM_RATIO,
9674 DEVCAMDEBUG_AEC_LA_RATIO,
9675 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009676 // DevCamDebug metadata result_keys AWB
9677 DEVCAMDEBUG_AWB_R_GAIN,
9678 DEVCAMDEBUG_AWB_G_GAIN,
9679 DEVCAMDEBUG_AWB_B_GAIN,
9680 DEVCAMDEBUG_AWB_CCT,
9681 DEVCAMDEBUG_AWB_DECISION,
9682 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009683 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9684 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9685 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009686 };
9687
Thierry Strudel3d639192016-09-09 11:52:26 -07009688 size_t result_keys_cnt =
9689 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9690
9691 Vector<int32_t> available_result_keys;
9692 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9693 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9694 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9695 }
9696 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9697 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9698 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9699 }
9700 if (supportedFaceDetectMode == 1) {
9701 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9702 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9703 } else if ((supportedFaceDetectMode == 2) ||
9704 (supportedFaceDetectMode == 3)) {
9705 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9706 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9707 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009708#ifndef USE_HAL_3_3
9709 if (hasBlackRegions) {
9710 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9711 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9712 }
9713#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009714 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9715 available_result_keys.array(), available_result_keys.size());
9716
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009717 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009718 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9719 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9720 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9721 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9722 ANDROID_SCALER_CROPPING_TYPE,
9723 ANDROID_SYNC_MAX_LATENCY,
9724 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9725 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9726 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9727 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9728 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9729 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9730 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9731 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9732 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9733 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9734 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9735 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9736 ANDROID_LENS_FACING,
9737 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9738 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9739 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9740 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9741 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9742 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9743 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9744 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9745 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9746 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9747 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9748 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9749 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9750 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9751 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9752 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9753 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9754 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9755 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9756 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009757 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009758 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9759 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9760 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9761 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9762 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9763 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9764 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9765 ANDROID_CONTROL_AVAILABLE_MODES,
9766 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9767 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9768 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9769 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009770 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009771#ifdef SUPPORT_DEPTH_DATA
9772 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9773 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9774 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9775 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9776 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9777#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009778#ifndef USE_HAL_3_3
9779 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9780 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9781#endif
9782 };
9783
9784 Vector<int32_t> available_characteristics_keys;
9785 available_characteristics_keys.appendArray(characteristics_keys_basic,
9786 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9787#ifndef USE_HAL_3_3
9788 if (hasBlackRegions) {
9789 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9790 }
9791#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009792 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009793 available_characteristics_keys.array(),
9794 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009795
9796     /* Available stall durations depend on the HW and SW and will differ between devices */
9797     /* Stall durations for BLOB (JPEG) and RAW16 are populated below */
9798 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9799 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9800
9801 Vector<int64_t> available_stall_durations;
9802 for (uint32_t j = 0; j < stall_formats_count; j++) {
9803 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9804 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9805 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9806 available_stall_durations.add(stall_formats[j]);
9807 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9808 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9809 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9810 }
9811 } else {
9812 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9813 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9814 available_stall_durations.add(stall_formats[j]);
9815 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9816 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9817 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9818 }
9819 }
9820 }
9821 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9822 available_stall_durations.array(),
9823 available_stall_durations.size());
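    // Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry built above is a 4-tuple of
    // (format, width, height, stall duration in ns). For example (hypothetical values),
    // a 4000x3000 BLOB configuration with a 33 ms JPEG stall would be reported as
    // (HAL_PIXEL_FORMAT_BLOB, 4000, 3000, 33000000).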
9824
9825 //QCAMERA3_OPAQUE_RAW
9826 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9827 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9828 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9829 case LEGACY_RAW:
9830 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9831 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9832 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9833 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9834 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9835 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9836 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9837 break;
9838 case MIPI_RAW:
9839 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9840 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9841 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9842 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9843 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9844 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9845 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9846 break;
9847 default:
9848 LOGE("unknown opaque_raw_format %d",
9849 gCamCapability[cameraId]->opaque_raw_fmt);
9850 break;
9851 }
9852 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
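    // Example of the mapping above (hypothetical sensor): a LEGACY_RAW sensor whose
    // white_level is 1023 (10-bit full scale) is advertised as
    // CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG with QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY.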
9853
9854 Vector<int32_t> strides;
9855 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9856 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9857 cam_stream_buf_plane_info_t buf_planes;
9858 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9859 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9860 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9861 &gCamCapability[cameraId]->padding_info, &buf_planes);
9862 strides.add(buf_planes.plane_info.mp[0].stride);
9863 }
9864 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9865 strides.size());
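    // QCAMERA3_OPAQUE_RAW_STRIDES is a list of (width, height, stride) triplets, one per
    // supported raw dimension, with the stride computed for the opaque raw format
    // selected above.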
9866
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009867 //TBD: remove the following line once backend advertises zzHDR in feature mask
9868 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009869 //Video HDR default
9870 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9871 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009872 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009873 int32_t vhdr_mode[] = {
9874 QCAMERA3_VIDEO_HDR_MODE_OFF,
9875 QCAMERA3_VIDEO_HDR_MODE_ON};
9876
9877 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9878 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9879 vhdr_mode, vhdr_mode_count);
9880 }
9881
Thierry Strudel3d639192016-09-09 11:52:26 -07009882 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9883 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9884 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9885
9886 uint8_t isMonoOnly =
9887 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9888 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9889 &isMonoOnly, 1);
9890
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009891#ifndef USE_HAL_3_3
9892 Vector<int32_t> opaque_size;
9893 for (size_t j = 0; j < scalar_formats_count; j++) {
9894 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9895 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9896 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9897 cam_stream_buf_plane_info_t buf_planes;
9898
9899 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9900 &gCamCapability[cameraId]->padding_info, &buf_planes);
9901
9902 if (rc == 0) {
9903 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9904 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9905 opaque_size.add(buf_planes.plane_info.frame_len);
9906                 } else {
9907 LOGE("raw frame calculation failed!");
9908 }
9909 }
9910 }
9911 }
9912
9913 if ((opaque_size.size() > 0) &&
9914 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9915 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9916 else
9917         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
9918#endif
9919
Thierry Strudel04e026f2016-10-10 11:27:36 -07009920 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9921 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9922 size = 0;
9923 count = CAM_IR_MODE_MAX;
9924 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9925 for (size_t i = 0; i < count; i++) {
9926 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9927 gCamCapability[cameraId]->supported_ir_modes[i]);
9928 if (NAME_NOT_FOUND != val) {
9929 avail_ir_modes[size] = (int32_t)val;
9930 size++;
9931 }
9932 }
9933 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9934 avail_ir_modes, size);
9935 }
9936
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009937 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9938 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9939 size = 0;
9940 count = CAM_AEC_CONVERGENCE_MAX;
9941 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9942 for (size_t i = 0; i < count; i++) {
9943 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9944 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9945 if (NAME_NOT_FOUND != val) {
9946 available_instant_aec_modes[size] = (int32_t)val;
9947 size++;
9948 }
9949 }
9950 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9951 available_instant_aec_modes, size);
9952 }
9953
Thierry Strudel54dc9782017-02-15 12:12:10 -08009954 int32_t sharpness_range[] = {
9955 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9956 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9957 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9958
9959 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9960 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9961 size = 0;
9962 count = CAM_BINNING_CORRECTION_MODE_MAX;
9963 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9964 for (size_t i = 0; i < count; i++) {
9965 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9966 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9967 gCamCapability[cameraId]->supported_binning_modes[i]);
9968 if (NAME_NOT_FOUND != val) {
9969 avail_binning_modes[size] = (int32_t)val;
9970 size++;
9971 }
9972 }
9973 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9974 avail_binning_modes, size);
9975 }
9976
9977 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9978 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9979 size = 0;
9980 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9981 for (size_t i = 0; i < count; i++) {
9982 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9983 gCamCapability[cameraId]->supported_aec_modes[i]);
9984 if (NAME_NOT_FOUND != val)
9985 available_aec_modes[size++] = val;
9986 }
9987 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9988 available_aec_modes, size);
9989 }
9990
9991 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9992 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
9993 size = 0;
9994 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
9995 for (size_t i = 0; i < count; i++) {
9996 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
9997 gCamCapability[cameraId]->supported_iso_modes[i]);
9998 if (NAME_NOT_FOUND != val)
9999 available_iso_modes[size++] = val;
10000 }
10001 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10002 available_iso_modes, size);
10003 }
10004
10005 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10006     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++) // 'count' would be stale here (left over from the ISO-mode loop above)
10007 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10008 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10009 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10010
10011 int32_t available_saturation_range[4];
10012 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10013 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10014 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10015 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10016 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10017 available_saturation_range, 4);
10018
10019 uint8_t is_hdr_values[2];
10020 is_hdr_values[0] = 0;
10021 is_hdr_values[1] = 1;
10022 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10023 is_hdr_values, 2);
10024
10025 float is_hdr_confidence_range[2];
10026 is_hdr_confidence_range[0] = 0.0;
10027 is_hdr_confidence_range[1] = 1.0;
10028 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10029 is_hdr_confidence_range, 2);
10030
Emilian Peev0a972ef2017-03-16 10:25:53 +000010031 size_t eepromLength = strnlen(
10032 reinterpret_cast<const char *>(
10033 gCamCapability[cameraId]->eeprom_version_info),
10034 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10035 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010036 char easelInfo[] = ",E:N";
10037 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10038 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10039 eepromLength += sizeof(easelInfo);
10040 strlcat(eepromInfo, (gHdrPlusClient ? ",E:Y" : ",E:N"), MAX_EEPROM_VERSION_INFO_LEN);
10041 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010042 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10043 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10044 }
10045
Thierry Strudel3d639192016-09-09 11:52:26 -070010046 gStaticMetadata[cameraId] = staticInfo.release();
10047 return rc;
10048}
10049
10050/*===========================================================================
10051 * FUNCTION : makeTable
10052 *
10053 * DESCRIPTION: flatten a table of dimensions into an int32_t array of
10054 *              width/height pairs
10055 * PARAMETERS : @dimTable - dimensions to flatten; @size - valid entries;
10056 *              @max_size - max entries to copy; @sizeTable - destination
10057 *              array receiving width,height pairs
10058 *==========================================================================*/
10059void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10060 size_t max_size, int32_t *sizeTable)
10061{
10062 size_t j = 0;
10063 if (size > max_size) {
10064 size = max_size;
10065 }
10066 for (size_t i = 0; i < size; i++) {
10067 sizeTable[j] = dimTable[i].width;
10068 sizeTable[j+1] = dimTable[i].height;
10069 j+=2;
10070 }
10071}
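// Illustration only (hypothetical values): dimTable = {{1920,1080},{1280,720}} is
// flattened by makeTable() into sizeTable = {1920, 1080, 1280, 720}.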
10072
10073/*===========================================================================
10074 * FUNCTION : makeFPSTable
10075 *
10076 * DESCRIPTION: make a table of fps ranges
10077 *
10078 * PARAMETERS :
10079 *
10080 *==========================================================================*/
10081void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10082 size_t max_size, int32_t *fpsRangesTable)
10083{
10084 size_t j = 0;
10085 if (size > max_size) {
10086 size = max_size;
10087 }
10088 for (size_t i = 0; i < size; i++) {
10089 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10090 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10091 j+=2;
10092 }
10093}
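// Illustration only (hypothetical values): fpsTable = {{15.0,30.0},{30.0,30.0}} is
// flattened by makeFPSTable() into fpsRangesTable = {15, 30, 30, 30}.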
10094
10095/*===========================================================================
10096 * FUNCTION : makeOverridesList
10097 *
10098 * DESCRIPTION: make a list of scene mode overrides
10099 *
10100 * PARAMETERS :
10101 *
10102 *
10103 *==========================================================================*/
10104void QCamera3HardwareInterface::makeOverridesList(
10105 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10106 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10107{
10108 /*daemon will give a list of overrides for all scene modes.
10109 However we should send the fwk only the overrides for the scene modes
10110 supported by the framework*/
10111 size_t j = 0;
10112 if (size > max_size) {
10113 size = max_size;
10114 }
10115 size_t focus_count = CAM_FOCUS_MODE_MAX;
10116 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10117 focus_count);
10118 for (size_t i = 0; i < size; i++) {
10119 bool supt = false;
10120 size_t index = supported_indexes[i];
10121 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10122 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10123 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10124 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10125 overridesTable[index].awb_mode);
10126 if (NAME_NOT_FOUND != val) {
10127 overridesList[j+1] = (uint8_t)val;
10128 }
10129 uint8_t focus_override = overridesTable[index].af_mode;
10130 for (size_t k = 0; k < focus_count; k++) {
10131 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10132 supt = true;
10133 break;
10134 }
10135 }
10136 if (supt) {
10137 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10138 focus_override);
10139 if (NAME_NOT_FOUND != val) {
10140 overridesList[j+2] = (uint8_t)val;
10141 }
10142 } else {
10143 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10144 }
10145 j+=3;
10146 }
10147}
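// The resulting overridesList is consumed as consecutive {AE mode, AWB mode, AF mode}
// triplets, one per framework-supported scene mode; AE defaults to ON or ON_AUTO_FLASH
// based on flash availability, and AF falls back to AF_MODE_OFF when the daemon's
// override is not a supported focus mode.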
10148
10149/*===========================================================================
10150 * FUNCTION : filterJpegSizes
10151 *
10152 * DESCRIPTION: Returns the subset of processed sizes usable as JPEG sizes,
10153 *              i.e. those at least as large as the active array divided by
10154 *              the maximum supported downscale factor
10155 * PARAMETERS : @jpegSizes/@processedSizes - output/input width,height pairs;
10156 *              @maxCount - capacity; @active_array_size, @downscale_factor - set the minimum size
10157 * RETURN : length of jpegSizes array
10158 *==========================================================================*/
10159
10160size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10161 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10162 uint8_t downscale_factor)
10163{
10164 if (0 == downscale_factor) {
10165 downscale_factor = 1;
10166 }
10167
10168 int32_t min_width = active_array_size.width / downscale_factor;
10169 int32_t min_height = active_array_size.height / downscale_factor;
10170 size_t jpegSizesCnt = 0;
10171 if (processedSizesCnt > maxCount) {
10172 processedSizesCnt = maxCount;
10173 }
10174 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10175 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10176 jpegSizes[jpegSizesCnt] = processedSizes[i];
10177 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10178 jpegSizesCnt += 2;
10179 }
10180 }
10181 return jpegSizesCnt;
10182}
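// Worked example (hypothetical values): with a 4000x3000 active array and
// downscale_factor = 2, only processed sizes of at least 2000x1500 are copied
// into jpegSizes.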
10183
10184/*===========================================================================
10185 * FUNCTION : computeNoiseModelEntryS
10186 *
10187 * DESCRIPTION: function to map a given sensitivity to the S noise
10188 * model parameters in the DNG noise model.
10189 *
10190 * PARAMETERS : sens : the sensor sensitivity
10191 *
10192 * RETURN     : S (sensor amplification) noise
10193 *
10194 *==========================================================================*/
10195double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10196 double s = gCamCapability[mCameraId]->gradient_S * sens +
10197 gCamCapability[mCameraId]->offset_S;
10198 return ((s < 0.0) ? 0.0 : s);
10199}
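// Worked example (hypothetical calibration): with gradient_S = 2.0e-6 and
// offset_S = 1.0e-5, S(400) = 2.0e-6 * 400 + 1.0e-5 = 8.1e-4; negative results
// are clamped to 0.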
10200
10201/*===========================================================================
10202 * FUNCTION : computeNoiseModelEntryO
10203 *
10204 * DESCRIPTION: function to map a given sensitivity to the O noise
10205 * model parameters in the DNG noise model.
10206 *
10207 * PARAMETERS : sens : the sensor sensitivity
10208 *
10209 * RETURN     : O (sensor readout) noise
10210 *
10211 *==========================================================================*/
10212double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10213 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10214 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10215 1.0 : (1.0 * sens / max_analog_sens);
10216 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10217 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10218 return ((o < 0.0) ? 0.0 : o);
10219}
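// Worked example (hypothetical calibration): with max_analog_sensitivity = 800,
// gradient_O = 1.0e-12 and offset_O = 4.0e-8, a request at sens = 1600 gives
// digital_gain = 2, so O = 1.0e-12 * 1600^2 + 4.0e-8 * 2^2 = 2.56e-6 + 1.6e-7 = 2.72e-6;
// negative results are clamped to 0.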
10220
10221/*===========================================================================
10222 * FUNCTION : getSensorSensitivity
10223 *
10224 * DESCRIPTION: convert iso_mode to an integer value
10225 *
10226 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10227 *
10228 * RETURN     : sensitivity supported by sensor
10229 *
10230 *==========================================================================*/
10231int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10232{
10233 int32_t sensitivity;
10234
10235 switch (iso_mode) {
10236 case CAM_ISO_MODE_100:
10237 sensitivity = 100;
10238 break;
10239 case CAM_ISO_MODE_200:
10240 sensitivity = 200;
10241 break;
10242 case CAM_ISO_MODE_400:
10243 sensitivity = 400;
10244 break;
10245 case CAM_ISO_MODE_800:
10246 sensitivity = 800;
10247 break;
10248 case CAM_ISO_MODE_1600:
10249 sensitivity = 1600;
10250 break;
10251 default:
10252 sensitivity = -1;
10253 break;
10254 }
10255 return sensitivity;
10256}
10257
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010258int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10259 if (gHdrPlusClient != nullptr) {
10260 return OK;
10261 }
10262
10263 gHdrPlusClient = std::make_shared<HdrPlusClient>();
10264 if (gHdrPlusClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010265 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10266 // to connect to Easel.
10267 bool doNotpowerOnEasel =
10268 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10269
10270 if (doNotpowerOnEasel) {
10271 gHdrPlusClient = nullptr;
10272 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10273 return OK;
10274 }
10275
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010276 // If Easel is present, power on Easel and suspend it immediately.
10277 status_t res = gHdrPlusClient->powerOnEasel();
10278 if (res != OK) {
10279             ALOGE("%s: Powering on Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10280 gHdrPlusClient = nullptr;
10281 return res;
10282 }
10283
10284 res = gHdrPlusClient->suspendEasel();
10285 if (res != OK) {
10286 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10287 }
10288
10289 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10290 } else {
10291 // Destroy HDR+ client if Easel isn't present.
10292 gHdrPlusClient = nullptr;
10293 }
10294
10295 return OK;
10296}
10297
Thierry Strudel3d639192016-09-09 11:52:26 -070010298/*===========================================================================
10299 * FUNCTION : getCamInfo
10300 *
10301 * DESCRIPTION: query camera capabilities
10302 *
10303 * PARAMETERS :
10304 * @cameraId : camera Id
10305 * @info : camera info struct to be filled in with camera capabilities
10306 *
10307 * RETURN : int type of status
10308 * NO_ERROR -- success
10309 *              non-zero failure code
10310 *==========================================================================*/
10311int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10312 struct camera_info *info)
10313{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010314 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010315 int rc = 0;
10316
10317 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010318
10319 rc = initHdrPlusClientLocked();
10320 if (rc != OK) {
10321 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10322 pthread_mutex_unlock(&gCamLock);
10323 return rc;
10324 }
10325
Thierry Strudel3d639192016-09-09 11:52:26 -070010326 if (NULL == gCamCapability[cameraId]) {
10327 rc = initCapabilities(cameraId);
10328 if (rc < 0) {
10329 pthread_mutex_unlock(&gCamLock);
10330 return rc;
10331 }
10332 }
10333
10334 if (NULL == gStaticMetadata[cameraId]) {
10335 rc = initStaticMetadata(cameraId);
10336 if (rc < 0) {
10337 pthread_mutex_unlock(&gCamLock);
10338 return rc;
10339 }
10340 }
10341
10342 switch(gCamCapability[cameraId]->position) {
10343 case CAM_POSITION_BACK:
10344 case CAM_POSITION_BACK_AUX:
10345 info->facing = CAMERA_FACING_BACK;
10346 break;
10347
10348 case CAM_POSITION_FRONT:
10349 case CAM_POSITION_FRONT_AUX:
10350 info->facing = CAMERA_FACING_FRONT;
10351 break;
10352
10353 default:
10354 LOGE("Unknown position type %d for camera id:%d",
10355 gCamCapability[cameraId]->position, cameraId);
10356 rc = -1;
10357 break;
10358 }
10359
10360
10361 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010362#ifndef USE_HAL_3_3
10363 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10364#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010365 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010366#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010367 info->static_camera_characteristics = gStaticMetadata[cameraId];
10368
10369 //For now assume both cameras can operate independently.
10370 info->conflicting_devices = NULL;
10371 info->conflicting_devices_length = 0;
10372
10373 //resource cost is 100 * MIN(1.0, m/M),
10374 //where m is throughput requirement with maximum stream configuration
10375 //and M is CPP maximum throughput.
10376 float max_fps = 0.0;
10377 for (uint32_t i = 0;
10378 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10379 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10380 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10381 }
10382 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10383 gCamCapability[cameraId]->active_array_size.width *
10384 gCamCapability[cameraId]->active_array_size.height * max_fps /
10385 gCamCapability[cameraId]->max_pixel_bandwidth;
10386 info->resource_cost = 100 * MIN(1.0, ratio);
10387 LOGI("camera %d resource cost is %d", cameraId,
10388 info->resource_cost);
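    // Worked example (hypothetical numbers): three processed streams at 4000x3000,
    // a 30 fps maximum and a CPP bandwidth of 1.2e9 pixels/s give
    // m/M = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, so resource_cost = 90.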
10389
10390 pthread_mutex_unlock(&gCamLock);
10391 return rc;
10392}
10393
10394/*===========================================================================
10395 * FUNCTION : translateCapabilityToMetadata
10396 *
10397 * DESCRIPTION: translate the capability into camera_metadata_t
10398 *
10399 * PARAMETERS : type of the request
10400 *
10401 *
10402 * RETURN : success: camera_metadata_t*
10403 * failure: NULL
10404 *
10405 *==========================================================================*/
10406camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10407{
10408 if (mDefaultMetadata[type] != NULL) {
10409 return mDefaultMetadata[type];
10410 }
10411 //first time we are handling this request
10412 //fill up the metadata structure using the wrapper class
10413 CameraMetadata settings;
10414 //translate from cam_capability_t to camera_metadata_tag_t
10415 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10416 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10417 int32_t defaultRequestID = 0;
10418 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10419
10420 /* OIS disable */
10421 char ois_prop[PROPERTY_VALUE_MAX];
10422 memset(ois_prop, 0, sizeof(ois_prop));
10423 property_get("persist.camera.ois.disable", ois_prop, "0");
10424 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10425
10426 /* Force video to use OIS */
10427 char videoOisProp[PROPERTY_VALUE_MAX];
10428 memset(videoOisProp, 0, sizeof(videoOisProp));
10429 property_get("persist.camera.ois.video", videoOisProp, "1");
10430 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010431
10432 // Hybrid AE enable/disable
10433 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10434 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10435 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10436 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 uint8_t controlIntent = 0;
10439 uint8_t focusMode;
10440 uint8_t vsMode;
10441 uint8_t optStabMode;
10442 uint8_t cacMode;
10443 uint8_t edge_mode;
10444 uint8_t noise_red_mode;
10445 uint8_t tonemap_mode;
10446 bool highQualityModeEntryAvailable = FALSE;
10447 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010448 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010449 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10450 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010451 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010452
Thierry Strudel3d639192016-09-09 11:52:26 -070010453 switch (type) {
10454 case CAMERA3_TEMPLATE_PREVIEW:
10455 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10456 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10457 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10458 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10459 edge_mode = ANDROID_EDGE_MODE_FAST;
10460 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10461 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10462 break;
10463 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10464 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10465 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10466 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10467 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10468 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10469 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10470 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10471 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10472 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10473 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10474 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10475 highQualityModeEntryAvailable = TRUE;
10476 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10477 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10478 fastModeEntryAvailable = TRUE;
10479 }
10480 }
10481 if (highQualityModeEntryAvailable) {
10482 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10483 } else if (fastModeEntryAvailable) {
10484 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10485 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010486 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10487 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10488 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010489 break;
10490 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10491 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10492 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10493 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010494 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10495 edge_mode = ANDROID_EDGE_MODE_FAST;
10496 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10497 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10498 if (forceVideoOis)
10499 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10500 break;
10501 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10502 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10503 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10504 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010505 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10506 edge_mode = ANDROID_EDGE_MODE_FAST;
10507 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10508 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10509 if (forceVideoOis)
10510 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10511 break;
10512 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10513 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10514 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10515 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10516 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10517 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10518 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10519 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10520 break;
10521 case CAMERA3_TEMPLATE_MANUAL:
10522 edge_mode = ANDROID_EDGE_MODE_FAST;
10523 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10524 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10525 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10526 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10527 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10528 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10529 break;
10530 default:
10531 edge_mode = ANDROID_EDGE_MODE_FAST;
10532 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10533 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10534 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10535 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10536 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10537 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10538 break;
10539 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010540     // Set CAC to OFF if the underlying device doesn't support it
10541 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10542 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10543 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010544 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10545 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10546 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10547 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10548 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10549 }
10550 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010551 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010552
10553 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10554 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10555 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10556 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10557 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10558 || ois_disable)
10559 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10560 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010561 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010562
10563 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10564 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10565
10566 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10567 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10568
10569 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10570 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10571
10572 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10573 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10574
10575 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10576 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10577
10578 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10579 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10580
10581 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10582 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10583
10584 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10585 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10586
10587 /*flash*/
10588 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10589 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10590
10591 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10592 settings.update(ANDROID_FLASH_FIRING_POWER,
10593 &flashFiringLevel, 1);
10594
10595 /* lens */
10596 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10597 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10598
10599 if (gCamCapability[mCameraId]->filter_densities_count) {
10600 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10601 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10602 gCamCapability[mCameraId]->filter_densities_count);
10603 }
10604
10605 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10606 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10607
Thierry Strudel3d639192016-09-09 11:52:26 -070010608 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10609 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10610
10611 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10612 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10613
10614 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10615 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10616
10617 /* face detection (default to OFF) */
10618 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10619 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10620
Thierry Strudel54dc9782017-02-15 12:12:10 -080010621 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10622 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010623
10624 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10625 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10626
10627 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10628 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10629
Thierry Strudel3d639192016-09-09 11:52:26 -070010630
10631 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10632 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10633
10634    /* Exposure time (default to the minimum exposure time) */
10635 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10636 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10637
10638 /* frame duration */
10639 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10640 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10641
10642 /* sensitivity */
10643 static const int32_t default_sensitivity = 100;
10644 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010645#ifndef USE_HAL_3_3
10646 static const int32_t default_isp_sensitivity =
10647 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10648 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10649#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010650
10651 /*edge mode*/
10652 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10653
10654 /*noise reduction mode*/
10655 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10656
10657 /*color correction mode*/
10658 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10659 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10660
10661    /*tonemap mode*/
10662 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10663
10664 int32_t scaler_crop_region[4];
10665 scaler_crop_region[0] = 0;
10666 scaler_crop_region[1] = 0;
10667 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10668 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10669 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10670
10671 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10672 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10673
10674 /*focus distance*/
10675 float focus_distance = 0.0;
10676 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10677
10678 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010679 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010680 float max_range = 0.0;
10681 float max_fixed_fps = 0.0;
10682 int32_t fps_range[2] = {0, 0};
10683 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10684 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010685 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10686 TEMPLATE_MAX_PREVIEW_FPS) {
10687 continue;
10688 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010689 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10690 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10691 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10692 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10693 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10694 if (range > max_range) {
10695 fps_range[0] =
10696 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10697 fps_range[1] =
10698 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10699 max_range = range;
10700 }
10701 } else {
10702 if (range < 0.01 && max_fixed_fps <
10703 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10704 fps_range[0] =
10705 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10706 fps_range[1] =
10707 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10708 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10709 }
10710 }
10711 }
10712 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
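    /* Worked example of the selection above (the fps table values here are
     * hypothetical; the real ones come from gCamCapability[mCameraId]->fps_ranges_tbl):
     *   table = { [15, 30], [30, 30], [7.5, 60] }, TEMPLATE_MAX_PREVIEW_FPS = 30
     *   - [7.5, 60] is skipped because its max_fps exceeds the template cap
     *   - PREVIEW / STILL_CAPTURE / ZSL templates pick [15, 30] (widest range)
     *   - other templates (e.g. VIDEO_RECORD) pick [30, 30] (highest fixed range)
     */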
10713
10714 /*precapture trigger*/
10715 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10716 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10717
10718 /*af trigger*/
10719 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10720 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10721
10722 /* ae & af regions */
10723 int32_t active_region[] = {
10724 gCamCapability[mCameraId]->active_array_size.left,
10725 gCamCapability[mCameraId]->active_array_size.top,
10726 gCamCapability[mCameraId]->active_array_size.left +
10727 gCamCapability[mCameraId]->active_array_size.width,
10728 gCamCapability[mCameraId]->active_array_size.top +
10729 gCamCapability[mCameraId]->active_array_size.height,
10730 0};
10731 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10732 sizeof(active_region) / sizeof(active_region[0]));
10733 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10734 sizeof(active_region) / sizeof(active_region[0]));
10735
10736 /* black level lock */
10737 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10738 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10739
Thierry Strudel3d639192016-09-09 11:52:26 -070010740 //special defaults for manual template
10741 if (type == CAMERA3_TEMPLATE_MANUAL) {
10742 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10743 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10744
10745 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10746 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10747
10748 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10749 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10750
10751 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10752 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10753
10754 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10755 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10756
10757 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10758 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10759 }
10760
10761
10762 /* TNR
10763     * This is where we decide for which templates TNR is enabled.
10764     * TNR is turned on if either the preview or the video stream requires it.
10765     * This is not to be confused with per-stream linking; that decision is
10766     * still made per session and is handled as part of stream configuration.
10767 */
10768 uint8_t tnr_enable = 0;
10769
10770 if (m_bTnrPreview || m_bTnrVideo) {
10771
10772 switch (type) {
10773 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10774 tnr_enable = 1;
10775 break;
10776
10777 default:
10778 tnr_enable = 0;
10779 break;
10780 }
10781
10782 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10783 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10784 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10785
10786 LOGD("TNR:%d with process plate %d for template:%d",
10787 tnr_enable, tnr_process_type, type);
10788 }
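    /* Example: with video TNR enabled (m_bTnrVideo), only the VIDEO_RECORD
     * template gets QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1; all other templates
     * keep it at 0, and CDS is forced OFF below whenever TNR is enabled. */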
10789
10790 //Update Link tags to default
10791 int32_t sync_type = CAM_TYPE_STANDALONE;
10792 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10793
10794    int32_t is_main = 0; // this doesn't matter as the app should overwrite it
10795 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10796
10797 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10798
10799 /* CDS default */
10800 char prop[PROPERTY_VALUE_MAX];
10801 memset(prop, 0, sizeof(prop));
10802 property_get("persist.camera.CDS", prop, "Auto");
10803 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10804 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10805 if (CAM_CDS_MODE_MAX == cds_mode) {
10806 cds_mode = CAM_CDS_MODE_AUTO;
10807 }
10808
10809    /* Disable CDS in templates that have TNR enabled */
10810 if (tnr_enable)
10811 cds_mode = CAM_CDS_MODE_OFF;
10812
10813 int32_t mode = cds_mode;
10814 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010815
Thierry Strudel269c81a2016-10-12 12:13:59 -070010816 /* Manual Convergence AEC Speed is disabled by default*/
10817 float default_aec_speed = 0;
10818 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10819
10820 /* Manual Convergence AWB Speed is disabled by default*/
10821 float default_awb_speed = 0;
10822 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10823
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010824 // Set instant AEC to normal convergence by default
10825 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10826 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10827
Shuzhen Wang19463d72016-03-08 11:09:52 -080010828 /* hybrid ae */
10829 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10830
Thierry Strudel3d639192016-09-09 11:52:26 -070010831 mDefaultMetadata[type] = settings.release();
10832
10833 return mDefaultMetadata[type];
10834}
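/*
 * Illustrative usage sketch (not part of this HAL): the framework fetches a
 * template through the camera3 entry point and may override individual tags
 * before submitting a capture request. The variable names below are
 * hypothetical; the API names come from camera3.h / CameraMetadata.
 *
 *   const camera_metadata_t *tmpl =
 *           device->ops->construct_default_request_settings(device,
 *                   CAMERA3_TEMPLATE_PREVIEW);
 *   CameraMetadata overridden(clone_camera_metadata(tmpl));
 *   int32_t fps[2] = {30, 30};    // example override of the AE target FPS range
 *   overridden.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps, 2);
 */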
10835
10836/*===========================================================================
10837 * FUNCTION : setFrameParameters
10838 *
10839 * DESCRIPTION: set parameters per frame as requested in the metadata from
10840 * framework
10841 *
10842 * PARAMETERS :
10843 * @request : request that needs to be serviced
10844 *   @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010845 * @blob_request: Whether this request is a blob request or not
10846 *
10847 * RETURN : success: NO_ERROR
10848 * failure:
10849 *==========================================================================*/
10850int QCamera3HardwareInterface::setFrameParameters(
10851 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010852 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010853 int blob_request,
10854 uint32_t snapshotStreamId)
10855{
10856 /*translate from camera_metadata_t type to parm_type_t*/
10857 int rc = 0;
10858 int32_t hal_version = CAM_HAL_V3;
10859
10860 clear_metadata_buffer(mParameters);
10861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10862 LOGE("Failed to set hal version in the parameters");
10863 return BAD_VALUE;
10864 }
10865
10866 /*we need to update the frame number in the parameters*/
10867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10868 request->frame_number)) {
10869 LOGE("Failed to set the frame number in the parameters");
10870 return BAD_VALUE;
10871 }
10872
10873 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010875 LOGE("Failed to set stream type mask in the parameters");
10876 return BAD_VALUE;
10877 }
10878
10879 if (mUpdateDebugLevel) {
10880 uint32_t dummyDebugLevel = 0;
10881        /* The value of dummyDebugLevel is irrelevant. On
10882         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
10883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10884 dummyDebugLevel)) {
10885 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10886 return BAD_VALUE;
10887 }
10888 mUpdateDebugLevel = false;
10889 }
10890
10891 if(request->settings != NULL){
10892 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10893 if (blob_request)
10894 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10895 }
10896
10897 return rc;
10898}
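/*
 * Illustrative call pattern (a sketch, not a definition of this HAL's flow):
 * the per-request path builds the stream ID list from the request's output
 * buffers, calls setFrameParameters(), and then pushes mParameters to the
 * backend. Populating streamsArray is elided here because its internal layout
 * is not shown in this excerpt.
 *
 *   cam_stream_ID_t streamsArray;
 *   memset(&streamsArray, 0, sizeof(streamsArray));
 *   // ... fill streamsArray from request->output_buffers ...
 *   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
 *   if (rc == NO_ERROR) {
 *       // hypothetical follow-up: apply the batched parameters to the backend
 *       rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
 *   }
 */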
10899
10900/*===========================================================================
10901 * FUNCTION : setReprocParameters
10902 *
10903 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10904 * return it.
10905 *
10906 * PARAMETERS :
10907 * @request : request that needs to be serviced
10908 *
10909 * RETURN : success: NO_ERROR
10910 * failure:
10911 *==========================================================================*/
10912int32_t QCamera3HardwareInterface::setReprocParameters(
10913 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10914 uint32_t snapshotStreamId)
10915{
10916 /*translate from camera_metadata_t type to parm_type_t*/
10917 int rc = 0;
10918
10919 if (NULL == request->settings){
10920 LOGE("Reprocess settings cannot be NULL");
10921 return BAD_VALUE;
10922 }
10923
10924 if (NULL == reprocParam) {
10925 LOGE("Invalid reprocessing metadata buffer");
10926 return BAD_VALUE;
10927 }
10928 clear_metadata_buffer(reprocParam);
10929
10930 /*we need to update the frame number in the parameters*/
10931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10932 request->frame_number)) {
10933 LOGE("Failed to set the frame number in the parameters");
10934 return BAD_VALUE;
10935 }
10936
10937 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10938 if (rc < 0) {
10939 LOGE("Failed to translate reproc request");
10940 return rc;
10941 }
10942
10943 CameraMetadata frame_settings;
10944 frame_settings = request->settings;
10945 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10946 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10947 int32_t *crop_count =
10948 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10949 int32_t *crop_data =
10950 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10951 int32_t *roi_map =
10952 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10953 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10954 cam_crop_data_t crop_meta;
10955 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10956 crop_meta.num_of_streams = 1;
10957 crop_meta.crop_info[0].crop.left = crop_data[0];
10958 crop_meta.crop_info[0].crop.top = crop_data[1];
10959 crop_meta.crop_info[0].crop.width = crop_data[2];
10960 crop_meta.crop_info[0].crop.height = crop_data[3];
10961
10962 crop_meta.crop_info[0].roi_map.left =
10963 roi_map[0];
10964 crop_meta.crop_info[0].roi_map.top =
10965 roi_map[1];
10966 crop_meta.crop_info[0].roi_map.width =
10967 roi_map[2];
10968 crop_meta.crop_info[0].roi_map.height =
10969 roi_map[3];
10970
10971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10972 rc = BAD_VALUE;
10973 }
10974 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10975 request->input_buffer->stream,
10976 crop_meta.crop_info[0].crop.left,
10977 crop_meta.crop_info[0].crop.top,
10978 crop_meta.crop_info[0].crop.width,
10979 crop_meta.crop_info[0].crop.height);
10980 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10981 request->input_buffer->stream,
10982 crop_meta.crop_info[0].roi_map.left,
10983 crop_meta.crop_info[0].roi_map.top,
10984 crop_meta.crop_info[0].roi_map.width,
10985 crop_meta.crop_info[0].roi_map.height);
10986 } else {
10987 LOGE("Invalid reprocess crop count %d!", *crop_count);
10988 }
10989 } else {
10990 LOGE("No crop data from matching output stream");
10991 }
10992
10993 /* These settings are not needed for regular requests so handle them specially for
10994 reprocess requests; information needed for EXIF tags */
10995 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10996 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10997 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10998 if (NAME_NOT_FOUND != val) {
10999 uint32_t flashMode = (uint32_t)val;
11000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11001 rc = BAD_VALUE;
11002 }
11003 } else {
11004 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11005 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11006 }
11007 } else {
11008 LOGH("No flash mode in reprocess settings");
11009 }
11010
11011 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11012 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11014 rc = BAD_VALUE;
11015 }
11016 } else {
11017 LOGH("No flash state in reprocess settings");
11018 }
11019
11020 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11021 uint8_t *reprocessFlags =
11022 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11024 *reprocessFlags)) {
11025 rc = BAD_VALUE;
11026 }
11027 }
11028
Thierry Strudel54dc9782017-02-15 12:12:10 -080011029 // Add exif debug data to internal metadata
11030 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11031 mm_jpeg_debug_exif_params_t *debug_params =
11032 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11033 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11034 // AE
11035 if (debug_params->ae_debug_params_valid == TRUE) {
11036 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11037 debug_params->ae_debug_params);
11038 }
11039 // AWB
11040 if (debug_params->awb_debug_params_valid == TRUE) {
11041 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11042 debug_params->awb_debug_params);
11043 }
11044 // AF
11045 if (debug_params->af_debug_params_valid == TRUE) {
11046 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11047 debug_params->af_debug_params);
11048 }
11049 // ASD
11050 if (debug_params->asd_debug_params_valid == TRUE) {
11051 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11052 debug_params->asd_debug_params);
11053 }
11054 // Stats
11055 if (debug_params->stats_debug_params_valid == TRUE) {
11056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11057 debug_params->stats_debug_params);
11058 }
11059 // BE Stats
11060 if (debug_params->bestats_debug_params_valid == TRUE) {
11061 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11062 debug_params->bestats_debug_params);
11063 }
11064 // BHIST
11065 if (debug_params->bhist_debug_params_valid == TRUE) {
11066 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11067 debug_params->bhist_debug_params);
11068 }
11069 // 3A Tuning
11070 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11071 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11072 debug_params->q3a_tuning_debug_params);
11073 }
11074 }
11075
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011076 // Add metadata which reprocess needs
11077 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11078 cam_reprocess_info_t *repro_info =
11079 (cam_reprocess_info_t *)frame_settings.find
11080 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011081 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011082 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011083 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011084 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011085 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011086 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011087 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011088 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011089 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011090 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011092 repro_info->pipeline_flip);
11093 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11094 repro_info->af_roi);
11095 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11096 repro_info->dyn_mask);
11097        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11098           CAM_INTF_PARM_ROTATION has already been added by
11099           translateToHalMetadata, and the HAL needs to keep this new rotation
11100           metadata. Otherwise, the old rotation info saved in the vendor tag
11101           would be used */
11102 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11103 CAM_INTF_PARM_ROTATION, reprocParam) {
11104 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11105 } else {
11106 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011107 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011108 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011109 }
11110
11111    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11112       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11113       roi.width and roi.height become the final JPEG size.
11114       For now, the HAL only checks this for reprocess requests */
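    /* Illustrative reprocess settings an application could provide to exercise
     * this path (the tag names are the vendor tags handled below; the numeric
     * values are hypothetical examples only):
     *
     *   uint8_t cropEnable  = 1;                              // TRUE
     *   int32_t cropRect[4] = { 0, 0, 3264, 2448 };           // left, top, width, height
     *   int32_t cropRoi[4]  = { 0, 0, 1920, 1080 };           // [2],[3] = final JPEG size
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */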
11115 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11116 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11117 uint8_t *enable =
11118 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11119 if (*enable == TRUE) {
11120 int32_t *crop_data =
11121 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11122 cam_stream_crop_info_t crop_meta;
11123 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11124 crop_meta.stream_id = 0;
11125 crop_meta.crop.left = crop_data[0];
11126 crop_meta.crop.top = crop_data[1];
11127 crop_meta.crop.width = crop_data[2];
11128 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011129 // The JPEG crop roi should match cpp output size
11130 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11131 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11132 crop_meta.roi_map.left = 0;
11133 crop_meta.roi_map.top = 0;
11134 crop_meta.roi_map.width = cpp_crop->crop.width;
11135 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011136 }
11137 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11138 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011139 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011140 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011141 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11142 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011143 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011144 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11145
11146 // Add JPEG scale information
11147 cam_dimension_t scale_dim;
11148 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11149 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11150 int32_t *roi =
11151 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11152 scale_dim.width = roi[2];
11153 scale_dim.height = roi[3];
11154 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11155 scale_dim);
11156 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11157 scale_dim.width, scale_dim.height, mCameraId);
11158 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011159 }
11160 }
11161
11162 return rc;
11163}
11164
11165/*===========================================================================
11166 * FUNCTION : saveRequestSettings
11167 *
11168 * DESCRIPTION: Add any settings that might have changed to the request settings
11169 * and save the settings to be applied on the frame
11170 *
11171 * PARAMETERS :
11172 * @jpegMetadata : the extracted and/or modified jpeg metadata
11173 * @request : request with initial settings
11174 *
11175 * RETURN :
11176 * camera_metadata_t* : pointer to the saved request settings
11177 *==========================================================================*/
11178camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11179 const CameraMetadata &jpegMetadata,
11180 camera3_capture_request_t *request)
11181{
11182 camera_metadata_t *resultMetadata;
11183 CameraMetadata camMetadata;
11184 camMetadata = request->settings;
11185
11186 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11187 int32_t thumbnail_size[2];
11188 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11189 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11190 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11191 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11192 }
11193
11194 if (request->input_buffer != NULL) {
11195 uint8_t reprocessFlags = 1;
11196 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11197 (uint8_t*)&reprocessFlags,
11198 sizeof(reprocessFlags));
11199 }
11200
11201 resultMetadata = camMetadata.release();
11202 return resultMetadata;
11203}
11204
11205/*===========================================================================
11206 * FUNCTION : setHalFpsRange
11207 *
11208 * DESCRIPTION: set FPS range parameter
11209 *
11210 *
11211 * PARAMETERS :
11212 * @settings : Metadata from framework
11213 * @hal_metadata: Metadata buffer
11214 *
11215 *
11216 * RETURN : success: NO_ERROR
11217 * failure:
11218 *==========================================================================*/
11219int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11220 metadata_buffer_t *hal_metadata)
11221{
11222 int32_t rc = NO_ERROR;
11223 cam_fps_range_t fps_range;
11224 fps_range.min_fps = (float)
11225 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11226 fps_range.max_fps = (float)
11227 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11228 fps_range.video_min_fps = fps_range.min_fps;
11229 fps_range.video_max_fps = fps_range.max_fps;
11230
11231 LOGD("aeTargetFpsRange fps: [%f %f]",
11232 fps_range.min_fps, fps_range.max_fps);
11233 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11234 * follows:
11235 * ---------------------------------------------------------------|
11236 * Video stream is absent in configure_streams |
11237 * (Camcorder preview before the first video record |
11238 * ---------------------------------------------------------------|
11239 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11240 * | | | vid_min/max_fps|
11241 * ---------------------------------------------------------------|
11242 * NO | [ 30, 240] | 240 | [240, 240] |
11243 * |-------------|-------------|----------------|
11244 * | [240, 240] | 240 | [240, 240] |
11245 * ---------------------------------------------------------------|
11246 * Video stream is present in configure_streams |
11247 * ---------------------------------------------------------------|
11248 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11249 * | | | vid_min/max_fps|
11250 * ---------------------------------------------------------------|
11251 * NO | [ 30, 240] | 240 | [240, 240] |
11252 * (camcorder prev |-------------|-------------|----------------|
11253 * after video rec | [240, 240] | 240 | [240, 240] |
11254 * is stopped) | | | |
11255 * ---------------------------------------------------------------|
11256 * YES | [ 30, 240] | 240 | [240, 240] |
11257 * |-------------|-------------|----------------|
11258 * | [240, 240] | 240 | [240, 240] |
11259 * ---------------------------------------------------------------|
11260 * When Video stream is absent in configure_streams,
11261 * preview fps = sensor_fps / batchsize
11262 * Eg: for 240fps at batchSize 4, preview = 60fps
11263 * for 120fps at batchSize 4, preview = 30fps
11264 *
11265 * When video stream is present in configure_streams, preview fps is as per
11266 * the ratio of preview buffers to video buffers requested in process
11267 * capture request
11268 */
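    /* Worked example (assuming PREVIEW_FPS_FOR_HFR = 30 and MAX_HFR_BATCH_SIZE = 4,
     * consistent with the figures above): aeTargetFpsRange = [120, 120] in
     * constrained high-speed mode
     *   -> HFR mode looked up from HFR_MODE_MAP for 120 fps
     *   -> mBatchSize = 120 / 30 = 4 (within the batch-size cap)
     *   -> preview runs at 120 / 4 = 30 fps when no video stream is configured
     */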
11269 mBatchSize = 0;
11270 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11271 fps_range.min_fps = fps_range.video_max_fps;
11272 fps_range.video_min_fps = fps_range.video_max_fps;
11273 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11274 fps_range.max_fps);
11275 if (NAME_NOT_FOUND != val) {
11276 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11278 return BAD_VALUE;
11279 }
11280
11281 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11282 /* If batchmode is currently in progress and the fps changes,
11283 * set the flag to restart the sensor */
11284 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11285 (mHFRVideoFps != fps_range.max_fps)) {
11286 mNeedSensorRestart = true;
11287 }
11288 mHFRVideoFps = fps_range.max_fps;
11289 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11290 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11291 mBatchSize = MAX_HFR_BATCH_SIZE;
11292 }
11293 }
11294 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11295
11296 }
11297 } else {
11298        /* HFR mode is a session parameter in the backend/ISP. It should be
11299         * reset when not in HFR mode */
11300 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11302 return BAD_VALUE;
11303 }
11304 }
11305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11306 return BAD_VALUE;
11307 }
11308 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11309 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11310 return rc;
11311}
11312
11313/*===========================================================================
11314 * FUNCTION : translateToHalMetadata
11315 *
11316 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11317 *
11318 *
11319 * PARAMETERS :
11320 * @request : request sent from framework
11321 *
11322 *
11323 * RETURN : success: NO_ERROR
11324 * failure:
11325 *==========================================================================*/
11326int QCamera3HardwareInterface::translateToHalMetadata
11327 (const camera3_capture_request_t *request,
11328 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011329 uint32_t snapshotStreamId) {
11330 if (request == nullptr || hal_metadata == nullptr) {
11331 return BAD_VALUE;
11332 }
11333
11334 int64_t minFrameDuration = getMinFrameDuration(request);
11335
11336 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11337 minFrameDuration);
11338}
11339
11340int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11341 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11342 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11343
Thierry Strudel3d639192016-09-09 11:52:26 -070011344 int rc = 0;
11345 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011346 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011347
11348 /* Do not change the order of the following list unless you know what you are
11349 * doing.
11350 * The order is laid out in such a way that parameters in the front of the table
11351 * may be used to override the parameters later in the table. Examples are:
11352 * 1. META_MODE should precede AEC/AWB/AF MODE
11353     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11354 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11355     * 4. Any mode should precede its corresponding settings
11356 */
11357 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11358 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11360 rc = BAD_VALUE;
11361 }
11362 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11363 if (rc != NO_ERROR) {
11364 LOGE("extractSceneMode failed");
11365 }
11366 }
11367
11368 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11369 uint8_t fwk_aeMode =
11370 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11371 uint8_t aeMode;
11372 int32_t redeye;
11373
11374 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11375 aeMode = CAM_AE_MODE_OFF;
11376 } else {
11377 aeMode = CAM_AE_MODE_ON;
11378 }
11379 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11380 redeye = 1;
11381 } else {
11382 redeye = 0;
11383 }
11384
11385 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11386 fwk_aeMode);
11387 if (NAME_NOT_FOUND != val) {
11388 int32_t flashMode = (int32_t)val;
11389 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11390 }
11391
11392 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11394 rc = BAD_VALUE;
11395 }
11396 }
11397
11398 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11399 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11400 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11401 fwk_whiteLevel);
11402 if (NAME_NOT_FOUND != val) {
11403 uint8_t whiteLevel = (uint8_t)val;
11404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11405 rc = BAD_VALUE;
11406 }
11407 }
11408 }
11409
11410 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11411 uint8_t fwk_cacMode =
11412 frame_settings.find(
11413 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11414 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11415 fwk_cacMode);
11416 if (NAME_NOT_FOUND != val) {
11417 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11418 bool entryAvailable = FALSE;
11419            // Check whether the framework-set CAC mode is supported by the device
11420 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11421 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11422 entryAvailable = TRUE;
11423 break;
11424 }
11425 }
11426 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11427            // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.:
11428            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
11429            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11430 if (entryAvailable == FALSE) {
11431 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11432 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11433 } else {
11434 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11435                    // HIGH is not supported, so set FAST; the spec says the underlying
11436 // device implementation can be the same for both modes.
11437 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11438 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11439 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11440 // in order to avoid the fps drop due to high quality
11441 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11442 } else {
11443 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11444 }
11445 }
11446 }
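            // Example of the fallback above: a device that advertises only FAST
            // maps a framework request for HIGH_QUALITY to FAST, while a device
            // with an empty aberration_modes list collapses any request to OFF.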
11447 LOGD("Final cacMode is %d", cacMode);
11448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11449 rc = BAD_VALUE;
11450 }
11451 } else {
11452 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11453 }
11454 }
11455
Thierry Strudel2896d122017-02-23 19:18:03 -080011456 char af_value[PROPERTY_VALUE_MAX];
11457 property_get("persist.camera.af.infinity", af_value, "0");
11458
Jason Lee84ae9972017-02-24 13:24:24 -080011459 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011460 if (atoi(af_value) == 0) {
11461 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011462 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011463 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11464 fwk_focusMode);
11465 if (NAME_NOT_FOUND != val) {
11466 uint8_t focusMode = (uint8_t)val;
11467 LOGD("set focus mode %d", focusMode);
11468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11469 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11470 rc = BAD_VALUE;
11471 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011472 }
11473 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011474 } else {
11475 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11476 LOGE("Focus forced to infinity %d", focusMode);
11477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11478 rc = BAD_VALUE;
11479 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011480 }
11481
Jason Lee84ae9972017-02-24 13:24:24 -080011482 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11483 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011484 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11486 focalDistance)) {
11487 rc = BAD_VALUE;
11488 }
11489 }
11490
11491 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11492 uint8_t fwk_antibandingMode =
11493 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11494 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11495 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11496 if (NAME_NOT_FOUND != val) {
11497 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011498 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11499 if (m60HzZone) {
11500 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11501 } else {
11502 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11503 }
11504 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11506 hal_antibandingMode)) {
11507 rc = BAD_VALUE;
11508 }
11509 }
11510 }
11511
11512 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11513 int32_t expCompensation = frame_settings.find(
11514 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11515 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11516 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11517 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11518 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011519 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011520 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11521 expCompensation)) {
11522 rc = BAD_VALUE;
11523 }
11524 }
11525
11526 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11527 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11529 rc = BAD_VALUE;
11530 }
11531 }
11532 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11533 rc = setHalFpsRange(frame_settings, hal_metadata);
11534 if (rc != NO_ERROR) {
11535 LOGE("setHalFpsRange failed");
11536 }
11537 }
11538
11539 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11540 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11541 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11542 rc = BAD_VALUE;
11543 }
11544 }
11545
11546 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11547 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11548 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11549 fwk_effectMode);
11550 if (NAME_NOT_FOUND != val) {
11551 uint8_t effectMode = (uint8_t)val;
11552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11553 rc = BAD_VALUE;
11554 }
11555 }
11556 }
11557
11558 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11559 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11561 colorCorrectMode)) {
11562 rc = BAD_VALUE;
11563 }
11564 }
11565
11566 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11567 cam_color_correct_gains_t colorCorrectGains;
11568 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11569 colorCorrectGains.gains[i] =
11570 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11571 }
11572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11573 colorCorrectGains)) {
11574 rc = BAD_VALUE;
11575 }
11576 }
11577
11578 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11579 cam_color_correct_matrix_t colorCorrectTransform;
11580 cam_rational_type_t transform_elem;
11581 size_t num = 0;
11582 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11583 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11584 transform_elem.numerator =
11585 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11586 transform_elem.denominator =
11587 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11588 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11589 num++;
11590 }
11591 }
11592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11593 colorCorrectTransform)) {
11594 rc = BAD_VALUE;
11595 }
11596 }
11597
11598 cam_trigger_t aecTrigger;
11599 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11600 aecTrigger.trigger_id = -1;
11601 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11602 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11603 aecTrigger.trigger =
11604 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11605 aecTrigger.trigger_id =
11606 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11608 aecTrigger)) {
11609 rc = BAD_VALUE;
11610 }
11611 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11612 aecTrigger.trigger, aecTrigger.trigger_id);
11613 }
11614
11615 /*af_trigger must come with a trigger id*/
11616 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11617 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11618 cam_trigger_t af_trigger;
11619 af_trigger.trigger =
11620 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11621 af_trigger.trigger_id =
11622 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11624 rc = BAD_VALUE;
11625 }
11626 LOGD("AfTrigger: %d AfTriggerID: %d",
11627 af_trigger.trigger, af_trigger.trigger_id);
11628 }
11629
11630 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11631 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11633 rc = BAD_VALUE;
11634 }
11635 }
11636 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11637 cam_edge_application_t edge_application;
11638 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011639
Thierry Strudel3d639192016-09-09 11:52:26 -070011640 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11641 edge_application.sharpness = 0;
11642 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011643 edge_application.sharpness =
11644 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11645 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11646 int32_t sharpness =
11647 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11648 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11649 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11650 LOGD("Setting edge mode sharpness %d", sharpness);
11651 edge_application.sharpness = sharpness;
11652 }
11653 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011654 }
11655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11656 rc = BAD_VALUE;
11657 }
11658 }
11659
11660 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11661 int32_t respectFlashMode = 1;
11662 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11663 uint8_t fwk_aeMode =
11664 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11665 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11666 respectFlashMode = 0;
11667 LOGH("AE Mode controls flash, ignore android.flash.mode");
11668 }
11669 }
11670 if (respectFlashMode) {
11671 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11672 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11673 LOGH("flash mode after mapping %d", val);
11674 // To check: CAM_INTF_META_FLASH_MODE usage
11675 if (NAME_NOT_FOUND != val) {
11676 uint8_t flashMode = (uint8_t)val;
11677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11678 rc = BAD_VALUE;
11679 }
11680 }
11681 }
11682 }
11683
11684 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11685 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11687 rc = BAD_VALUE;
11688 }
11689 }
11690
11691 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11692 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11694 flashFiringTime)) {
11695 rc = BAD_VALUE;
11696 }
11697 }
11698
11699 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11700 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11701 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11702 hotPixelMode)) {
11703 rc = BAD_VALUE;
11704 }
11705 }
11706
11707 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11708 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11710 lensAperture)) {
11711 rc = BAD_VALUE;
11712 }
11713 }
11714
11715 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11716 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11718 filterDensity)) {
11719 rc = BAD_VALUE;
11720 }
11721 }
11722
11723 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11724 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11726 focalLength)) {
11727 rc = BAD_VALUE;
11728 }
11729 }
11730
11731 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11732 uint8_t optStabMode =
11733 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11735 optStabMode)) {
11736 rc = BAD_VALUE;
11737 }
11738 }
11739
11740 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11741 uint8_t videoStabMode =
11742 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11743 LOGD("videoStabMode from APP = %d", videoStabMode);
11744 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11745 videoStabMode)) {
11746 rc = BAD_VALUE;
11747 }
11748 }
11749
11750
11751 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11752 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11754 noiseRedMode)) {
11755 rc = BAD_VALUE;
11756 }
11757 }
11758
11759 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11760 float reprocessEffectiveExposureFactor =
11761 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11763 reprocessEffectiveExposureFactor)) {
11764 rc = BAD_VALUE;
11765 }
11766 }
11767
11768 cam_crop_region_t scalerCropRegion;
11769 bool scalerCropSet = false;
11770 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11771 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11772 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11773 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11774 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11775
11776 // Map coordinate system from active array to sensor output.
11777 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11778 scalerCropRegion.width, scalerCropRegion.height);
11779
11780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11781 scalerCropRegion)) {
11782 rc = BAD_VALUE;
11783 }
11784 scalerCropSet = true;
11785 }
11786
11787 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11788 int64_t sensorExpTime =
11789 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11790 LOGD("setting sensorExpTime %lld", sensorExpTime);
11791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11792 sensorExpTime)) {
11793 rc = BAD_VALUE;
11794 }
11795 }
11796
11797 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11798 int64_t sensorFrameDuration =
11799 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011800 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11801 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11802 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11803 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11805 sensorFrameDuration)) {
11806 rc = BAD_VALUE;
11807 }
11808 }
11809
11810 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11811 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11812 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11813 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11814 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11815 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11816 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11818 sensorSensitivity)) {
11819 rc = BAD_VALUE;
11820 }
11821 }
11822
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011823#ifndef USE_HAL_3_3
11824 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11825 int32_t ispSensitivity =
11826 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11827 if (ispSensitivity <
11828 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11829 ispSensitivity =
11830 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11831 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11832 }
11833 if (ispSensitivity >
11834 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11835 ispSensitivity =
11836 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11837 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11838 }
11839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11840 ispSensitivity)) {
11841 rc = BAD_VALUE;
11842 }
11843 }
11844#endif
11845
Thierry Strudel3d639192016-09-09 11:52:26 -070011846 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11847 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11849 rc = BAD_VALUE;
11850 }
11851 }
11852
11853 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11854 uint8_t fwk_facedetectMode =
11855 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11856
11857 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11858 fwk_facedetectMode);
11859
11860 if (NAME_NOT_FOUND != val) {
11861 uint8_t facedetectMode = (uint8_t)val;
11862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11863 facedetectMode)) {
11864 rc = BAD_VALUE;
11865 }
11866 }
11867 }
11868
Thierry Strudel54dc9782017-02-15 12:12:10 -080011869 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011870 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011871 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11873 histogramMode)) {
11874 rc = BAD_VALUE;
11875 }
11876 }
11877
11878 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11879 uint8_t sharpnessMapMode =
11880 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11882 sharpnessMapMode)) {
11883 rc = BAD_VALUE;
11884 }
11885 }
11886
11887 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11888 uint8_t tonemapMode =
11889 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11890 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11891 rc = BAD_VALUE;
11892 }
11893 }
11894 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11895 /*All tonemap channels will have the same number of points*/
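/* Each ANDROID_TONEMAP_CURVE_* entry is a flattened list of (Pin, Pout) control points,
   e.g. the linear curve {(0.0, 0.0), (1.0, 1.0)} arrives as the float array
   {0.0, 0.0, 1.0, 1.0}, which is why the point count below is count/2. */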
11896 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11897 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11898 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11899 cam_rgb_tonemap_curves tonemapCurves;
11900 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11901 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11902 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11903 tonemapCurves.tonemap_points_cnt,
11904 CAM_MAX_TONEMAP_CURVE_SIZE);
11905 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11906 }
11907
11908 /* ch0 = G*/
11909 size_t point = 0;
11910 cam_tonemap_curve_t tonemapCurveGreen;
11911 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11912 for (size_t j = 0; j < 2; j++) {
11913 tonemapCurveGreen.tonemap_points[i][j] =
11914 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11915 point++;
11916 }
11917 }
11918 tonemapCurves.curves[0] = tonemapCurveGreen;
11919
11920 /* ch 1 = B */
11921 point = 0;
11922 cam_tonemap_curve_t tonemapCurveBlue;
11923 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11924 for (size_t j = 0; j < 2; j++) {
11925 tonemapCurveBlue.tonemap_points[i][j] =
11926 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11927 point++;
11928 }
11929 }
11930 tonemapCurves.curves[1] = tonemapCurveBlue;
11931
11932 /* ch 2 = R */
11933 point = 0;
11934 cam_tonemap_curve_t tonemapCurveRed;
11935 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11936 for (size_t j = 0; j < 2; j++) {
11937 tonemapCurveRed.tonemap_points[i][j] =
11938 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11939 point++;
11940 }
11941 }
11942 tonemapCurves.curves[2] = tonemapCurveRed;
11943
11944 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11945 tonemapCurves)) {
11946 rc = BAD_VALUE;
11947 }
11948 }
11949
11950 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11951 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11953 captureIntent)) {
11954 rc = BAD_VALUE;
11955 }
11956 }
11957
11958 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11959 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11961 blackLevelLock)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965
11966 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11967 uint8_t lensShadingMapMode =
11968 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11970 lensShadingMapMode)) {
11971 rc = BAD_VALUE;
11972 }
11973 }
11974
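// AE/AF regions arrive in active-array coordinates; map them into the sensor output
// coordinate space and reconcile them with the current scaler crop region before
// handing them to the backend.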
11975 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11976 cam_area_t roi;
11977 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011978 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011979
11980 // Map coordinate system from active array to sensor output.
11981 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11982 roi.rect.height);
11983
11984 if (scalerCropSet) {
11985 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11986 }
11987 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11988 rc = BAD_VALUE;
11989 }
11990 }
11991
11992 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11993 cam_area_t roi;
11994 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011995 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011996
11997 // Map coordinate system from active array to sensor output.
11998 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11999 roi.rect.height);
12000
12001 if (scalerCropSet) {
12002 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12003 }
12004 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12005 rc = BAD_VALUE;
12006 }
12007 }
12008
12009 // CDS for non-HFR non-video mode
12010 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12011 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12012 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12013 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12014 LOGE("Invalid CDS mode %d!", *fwk_cds);
12015 } else {
12016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12017 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12018 rc = BAD_VALUE;
12019 }
12020 }
12021 }
12022
Thierry Strudel04e026f2016-10-10 11:27:36 -070012023 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012024 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012025 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012026 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12027 }
12028 if (m_bVideoHdrEnabled)
12029 vhdr = CAM_VIDEO_HDR_MODE_ON;
12030
Thierry Strudel54dc9782017-02-15 12:12:10 -080012031 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12032
12033 if(vhdr != curr_hdr_state)
12034 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12035
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012036 rc = setVideoHdrMode(mParameters, vhdr);
12037 if (rc != NO_ERROR) {
12038 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012039 }
12040
12041 //IR
12042 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12043 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12044 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012045 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12046 uint8_t isIRon = 0;
12047
12048 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012049 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12050 LOGE("Invalid IR mode %d!", fwk_ir);
12051 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012052 if(isIRon != curr_ir_state )
12053 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12054
Thierry Strudel04e026f2016-10-10 11:27:36 -070012055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12056 CAM_INTF_META_IR_MODE, fwk_ir)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060 }
12061
Thierry Strudel54dc9782017-02-15 12:12:10 -080012062 //Binning Correction Mode
12063 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12064 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12065 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12066 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12067 || (0 > fwk_binning_correction)) {
12068 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12069 } else {
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12071 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12072 rc = BAD_VALUE;
12073 }
12074 }
12075 }
12076
Thierry Strudel269c81a2016-10-12 12:13:59 -070012077 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12078 float aec_speed;
12079 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12080 LOGD("AEC Speed :%f", aec_speed);
12081 if ( aec_speed < 0 ) {
12082 LOGE("Invalid AEC mode %f!", aec_speed);
12083 } else {
12084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12085 aec_speed)) {
12086 rc = BAD_VALUE;
12087 }
12088 }
12089 }
12090
12091 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12092 float awb_speed;
12093 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12094 LOGD("AWB Speed :%f", awb_speed);
12095 if ( awb_speed < 0 ) {
12096 LOGE("Invalid AWB mode %f!", awb_speed);
12097 } else {
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12099 awb_speed)) {
12100 rc = BAD_VALUE;
12101 }
12102 }
12103 }
12104
Thierry Strudel3d639192016-09-09 11:52:26 -070012105 // TNR
12106 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12107 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12108 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012109 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012110 cam_denoise_param_t tnr;
12111 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12112 tnr.process_plates =
12113 (cam_denoise_process_type_t)frame_settings.find(
12114 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12115 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012116
12117 if(b_TnrRequested != curr_tnr_state)
12118 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12119
Thierry Strudel3d639192016-09-09 11:52:26 -070012120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12121 rc = BAD_VALUE;
12122 }
12123 }
12124
Thierry Strudel54dc9782017-02-15 12:12:10 -080012125 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012126 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012127 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12129 *exposure_metering_mode)) {
12130 rc = BAD_VALUE;
12131 }
12132 }
12133
Thierry Strudel3d639192016-09-09 11:52:26 -070012134 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12135 int32_t fwk_testPatternMode =
12136 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12137 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12138 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12139
12140 if (NAME_NOT_FOUND != testPatternMode) {
12141 cam_test_pattern_data_t testPatternData;
12142 memset(&testPatternData, 0, sizeof(testPatternData));
12143 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12144 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12145 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12146 int32_t *fwk_testPatternData =
12147 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12148 testPatternData.r = fwk_testPatternData[0];
12149 testPatternData.b = fwk_testPatternData[3];
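// The framework orders ANDROID_SENSOR_TEST_PATTERN_DATA as [R, G_even, G_odd, B];
// how the even/odd green samples map to the sensor's Gr/Gb channels depends on its
// Bayer arrangement, hence the swap below.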
12150 switch (gCamCapability[mCameraId]->color_arrangement) {
12151 case CAM_FILTER_ARRANGEMENT_RGGB:
12152 case CAM_FILTER_ARRANGEMENT_GRBG:
12153 testPatternData.gr = fwk_testPatternData[1];
12154 testPatternData.gb = fwk_testPatternData[2];
12155 break;
12156 case CAM_FILTER_ARRANGEMENT_GBRG:
12157 case CAM_FILTER_ARRANGEMENT_BGGR:
12158 testPatternData.gr = fwk_testPatternData[2];
12159 testPatternData.gb = fwk_testPatternData[1];
12160 break;
12161 default:
12162 LOGE("color arrangement %d is not supported",
12163 gCamCapability[mCameraId]->color_arrangement);
12164 break;
12165 }
12166 }
12167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12168 testPatternData)) {
12169 rc = BAD_VALUE;
12170 }
12171 } else {
12172 LOGE("Invalid framework sensor test pattern mode %d",
12173 fwk_testPatternMode);
12174 }
12175 }
12176
12177 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12178 size_t count = 0;
12179 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12180 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12181 gps_coords.data.d, gps_coords.count, count);
12182 if (gps_coords.count != count) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12188 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12189 size_t count = 0;
12190 const char *gps_methods_src = (const char *)
12191 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12192 memset(gps_methods, '\0', sizeof(gps_methods));
12193 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12194 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12195 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12196 if (GPS_PROCESSING_METHOD_SIZE != count) {
12197 rc = BAD_VALUE;
12198 }
12199 }
12200
12201 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12202 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12204 gps_timestamp)) {
12205 rc = BAD_VALUE;
12206 }
12207 }
12208
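// Map the requested EXIF orientation (0/90/180/270 degrees) to the HAL rotation enum
// and tag it with the snapshot stream id so the post-processing path can rotate the
// JPEG accordingly.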
12209 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12210 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12211 cam_rotation_info_t rotation_info;
12212 if (orientation == 0) {
12213 rotation_info.rotation = ROTATE_0;
12214 } else if (orientation == 90) {
12215 rotation_info.rotation = ROTATE_90;
12216 } else if (orientation == 180) {
12217 rotation_info.rotation = ROTATE_180;
12218 } else if (orientation == 270) {
12219 rotation_info.rotation = ROTATE_270;
12220 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012221 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012222 rotation_info.streamId = snapshotStreamId;
12223 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12225 rc = BAD_VALUE;
12226 }
12227 }
12228
12229 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12230 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12237 uint32_t thumb_quality = (uint32_t)
12238 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12240 thumb_quality)) {
12241 rc = BAD_VALUE;
12242 }
12243 }
12244
12245 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12246 cam_dimension_t dim;
12247 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12248 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253
12254 // Internal metadata
12255 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12256 size_t count = 0;
12257 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12258 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12259 privatedata.data.i32, privatedata.count, count);
12260 if (privatedata.count != count) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012265 // ISO/Exposure Priority
12266 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12267 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12268 cam_priority_mode_t mode =
12269 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12270 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12271 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12272 use_iso_exp_pty.previewOnly = FALSE;
12273 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12274 use_iso_exp_pty.value = *ptr;
12275
12276 if(CAM_ISO_PRIORITY == mode) {
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12278 use_iso_exp_pty)) {
12279 rc = BAD_VALUE;
12280 }
12281 }
12282 else {
12283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12284 use_iso_exp_pty)) {
12285 rc = BAD_VALUE;
12286 }
12287 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012288
12289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293 } else {
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12295 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012296 }
12297 }
12298
12299 // Saturation
12300 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12301 int32_t* use_saturation =
12302 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12304 rc = BAD_VALUE;
12305 }
12306 }
12307
Thierry Strudel3d639192016-09-09 11:52:26 -070012308 // EV step
12309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12310 gCamCapability[mCameraId]->exp_compensation_step)) {
12311 rc = BAD_VALUE;
12312 }
12313
12314 // CDS info
12315 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12316 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12317 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12318
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12320 CAM_INTF_META_CDS_DATA, *cdsData)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324
Shuzhen Wang19463d72016-03-08 11:09:52 -080012325 // Hybrid AE
12326 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12327 uint8_t *hybrid_ae = (uint8_t *)
12328 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12329
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12331 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335
Shuzhen Wang14415f52016-11-16 18:26:18 -080012336 // Histogram
12337 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12338 uint8_t histogramMode =
12339 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12341 histogramMode)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345
12346 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12347 int32_t histogramBins =
12348 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12350 histogramBins)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
Thierry Strudel3d639192016-09-09 11:52:26 -070012355 return rc;
12356}
12357
12358/*===========================================================================
12359 * FUNCTION : captureResultCb
12360 *
12361 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12362 *
12363 * PARAMETERS :
12364 * @frame : frame information from mm-camera-interface
12365 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12366 * @userdata: opaque pointer back to the owning QCamera3HardwareInterface instance
12367 *
12368 * RETURN : NONE
12369 *==========================================================================*/
12370void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12371 camera3_stream_buffer_t *buffer,
12372 uint32_t frame_number, bool isInputBuffer, void *userdata)
12373{
12374 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12375 if (hw == NULL) {
12376 LOGE("Invalid hw %p", hw);
12377 return;
12378 }
12379
12380 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12381 return;
12382}
12383
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012384/*===========================================================================
12385 * FUNCTION : setBufferErrorStatus
12386 *
12387 * DESCRIPTION: Callback handler for channels to report any buffer errors
12388 *
12389 * PARAMETERS :
12390 * @ch : channel that reported the buffer error
12391 * @frame_number : frame number the buffer error was reported on
12392 * @buffer_status : buffer error status
12393 * @userdata: opaque pointer back to the owning QCamera3HardwareInterface instance
12394 *
12395 * RETURN : NONE
12396 *==========================================================================*/
12397void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12398 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12399{
12400 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12401 if (hw == NULL) {
12402 LOGE("Invalid hw %p", hw);
12403 return;
12404 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012405
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012406 hw->setBufferErrorStatus(ch, frame_number, err);
12407 return;
12408}
12409
12410void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12411 uint32_t frameNumber, camera3_buffer_status_t err)
12412{
12413 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12414 pthread_mutex_lock(&mMutex);
12415
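// Mark every pending buffer of this frame that belongs to the reporting channel so it
// is later returned to the framework with CAMERA3_BUFFER_STATUS_ERROR.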
12416 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12417 if (req.frame_number != frameNumber)
12418 continue;
12419 for (auto& k : req.mPendingBufferList) {
12420 if(k.stream->priv == ch) {
12421 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12422 }
12423 }
12424 }
12425
12426 pthread_mutex_unlock(&mMutex);
12427 return;
12428}
Thierry Strudel3d639192016-09-09 11:52:26 -070012429/*===========================================================================
12430 * FUNCTION : initialize
12431 *
12432 * DESCRIPTION: Pass framework callback pointers to HAL
12433 *
12434 * PARAMETERS :
12435 *
12436 *
12437 * RETURN : Success : 0
12438 * Failure: -ENODEV
12439 *==========================================================================*/
12440
12441int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12442 const camera3_callback_ops_t *callback_ops)
12443{
12444 LOGD("E");
12445 QCamera3HardwareInterface *hw =
12446 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12447 if (!hw) {
12448 LOGE("NULL camera device");
12449 return -ENODEV;
12450 }
12451
12452 int rc = hw->initialize(callback_ops);
12453 LOGD("X");
12454 return rc;
12455}
12456
12457/*===========================================================================
12458 * FUNCTION : configure_streams
12459 *
12460 * DESCRIPTION: Configure the streams requested by the framework for this camera device
12461 *
12462 * PARAMETERS :
12463 *
12464 *
12465 * RETURN : Success: 0
12466 * Failure: -EINVAL (if stream configuration is invalid)
12467 * -ENODEV (fatal error)
12468 *==========================================================================*/
12469
12470int QCamera3HardwareInterface::configure_streams(
12471 const struct camera3_device *device,
12472 camera3_stream_configuration_t *stream_list)
12473{
12474 LOGD("E");
12475 QCamera3HardwareInterface *hw =
12476 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12477 if (!hw) {
12478 LOGE("NULL camera device");
12479 return -ENODEV;
12480 }
12481 int rc = hw->configureStreams(stream_list);
12482 LOGD("X");
12483 return rc;
12484}
12485
12486/*===========================================================================
12487 * FUNCTION : construct_default_request_settings
12488 *
12489 * DESCRIPTION: Configure a settings buffer to meet the required use case
12490 *
12491 * PARAMETERS :
12492 *
12493 *
12494 * RETURN : Success: Return valid metadata
12495 * Failure: Return NULL
12496 *==========================================================================*/
12497const camera_metadata_t* QCamera3HardwareInterface::
12498 construct_default_request_settings(const struct camera3_device *device,
12499 int type)
12500{
12501
12502 LOGD("E");
12503 camera_metadata_t* fwk_metadata = NULL;
12504 QCamera3HardwareInterface *hw =
12505 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12506 if (!hw) {
12507 LOGE("NULL camera device");
12508 return NULL;
12509 }
12510
12511 fwk_metadata = hw->translateCapabilityToMetadata(type);
12512
12513 LOGD("X");
12514 return fwk_metadata;
12515}
12516
12517/*===========================================================================
12518 * FUNCTION : process_capture_request
12519 *
12520 * DESCRIPTION: Process a single capture request submitted by the framework
12521 *
12522 * PARAMETERS :
12523 *
12524 *
12525 * RETURN :
12526 *==========================================================================*/
12527int QCamera3HardwareInterface::process_capture_request(
12528 const struct camera3_device *device,
12529 camera3_capture_request_t *request)
12530{
12531 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012532 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012533 QCamera3HardwareInterface *hw =
12534 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12535 if (!hw) {
12536 LOGE("NULL camera device");
12537 return -EINVAL;
12538 }
12539
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012540 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012541 LOGD("X");
12542 return rc;
12543}
12544
12545/*===========================================================================
12546 * FUNCTION : dump
12547 *
12548 * DESCRIPTION: Dump HAL state into the given file descriptor for debugging
12549 *
12550 * PARAMETERS :
12551 *
12552 *
12553 * RETURN :
12554 *==========================================================================*/
12555
12556void QCamera3HardwareInterface::dump(
12557 const struct camera3_device *device, int fd)
12558{
12559 /* Log level property is read when "adb shell dumpsys media.camera" is
12560 called so that the log level can be controlled without restarting
12561 the media server */
12562 getLogLevel();
12563
12564 LOGD("E");
12565 QCamera3HardwareInterface *hw =
12566 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12567 if (!hw) {
12568 LOGE("NULL camera device");
12569 return;
12570 }
12571
12572 hw->dump(fd);
12573 LOGD("X");
12574 return;
12575}
12576
12577/*===========================================================================
12578 * FUNCTION : flush
12579 *
12580 * DESCRIPTION: Flush all in-flight captures and return their buffers to the framework
12581 *
12582 * PARAMETERS :
12583 *
12584 *
12585 * RETURN :
12586 *==========================================================================*/
12587
12588int QCamera3HardwareInterface::flush(
12589 const struct camera3_device *device)
12590{
12591 int rc;
12592 LOGD("E");
12593 QCamera3HardwareInterface *hw =
12594 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12595 if (!hw) {
12596 LOGE("NULL camera device");
12597 return -EINVAL;
12598 }
12599
12600 pthread_mutex_lock(&hw->mMutex);
12601 // Validate current state
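// Flush only makes sense once streaming has STARTED; an ERROR state is escalated as a
// device error, and any other state makes flush a harmless no-op.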
12602 switch (hw->mState) {
12603 case STARTED:
12604 /* valid state */
12605 break;
12606
12607 case ERROR:
12608 pthread_mutex_unlock(&hw->mMutex);
12609 hw->handleCameraDeviceError();
12610 return -ENODEV;
12611
12612 default:
12613 LOGI("Flush returned during state %d", hw->mState);
12614 pthread_mutex_unlock(&hw->mMutex);
12615 return 0;
12616 }
12617 pthread_mutex_unlock(&hw->mMutex);
12618
12619 rc = hw->flush(true /* restart channels */ );
12620 LOGD("X");
12621 return rc;
12622}
12623
12624/*===========================================================================
12625 * FUNCTION : close_camera_device
12626 *
12627 * DESCRIPTION: Close the camera device and release the HAL instance
12628 *
12629 * PARAMETERS :
12630 *
12631 *
12632 * RETURN :
12633 *==========================================================================*/
12634int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12635{
12636 int ret = NO_ERROR;
12637 QCamera3HardwareInterface *hw =
12638 reinterpret_cast<QCamera3HardwareInterface *>(
12639 reinterpret_cast<camera3_device_t *>(device)->priv);
12640 if (!hw) {
12641 LOGE("NULL camera device");
12642 return BAD_VALUE;
12643 }
12644
12645 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12646 delete hw;
12647 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012648 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012649 return ret;
12650}
12651
12652/*===========================================================================
12653 * FUNCTION : getWaveletDenoiseProcessPlate
12654 *
12655 * DESCRIPTION: query wavelet denoise process plate
12656 *
12657 * PARAMETERS : None
12658 *
12659 * RETURN : WNR process plate value
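 * (for example, "adb shell setprop persist.denoise.process.plates 2" would
 * select the streamlined Y/CbCr plate, assuming the property is set before
 * the HAL reads it)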
12660 *==========================================================================*/
12661cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12662{
12663 char prop[PROPERTY_VALUE_MAX];
12664 memset(prop, 0, sizeof(prop));
12665 property_get("persist.denoise.process.plates", prop, "0");
12666 int processPlate = atoi(prop);
12667 switch(processPlate) {
12668 case 0:
12669 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12670 case 1:
12671 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12672 case 2:
12673 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12674 case 3:
12675 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12676 default:
12677 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12678 }
12679}
12680
12681
12682/*===========================================================================
12683 * FUNCTION : getTemporalDenoiseProcessPlate
12684 *
12685 * DESCRIPTION: query temporal denoise process plate
12686 *
12687 * PARAMETERS : None
12688 *
12689 * RETURN : TNR process plate value
12690 *==========================================================================*/
12691cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12692{
12693 char prop[PROPERTY_VALUE_MAX];
12694 memset(prop, 0, sizeof(prop));
12695 property_get("persist.tnr.process.plates", prop, "0");
12696 int processPlate = atoi(prop);
12697 switch(processPlate) {
12698 case 0:
12699 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12700 case 1:
12701 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12702 case 2:
12703 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12704 case 3:
12705 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12706 default:
12707 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12708 }
12709}
12710
12711
12712/*===========================================================================
12713 * FUNCTION : extractSceneMode
12714 *
12715 * DESCRIPTION: Extract scene mode from frameworks set metadata
12716 *
12717 * PARAMETERS :
12718 * @frame_settings: CameraMetadata reference
12719 * @metaMode: ANDROID_CONTROL_MODE value from the framework settings
12720 * @hal_metadata: hal metadata structure
12721 *
12722 * RETURN : int32_t type of status, NO_ERROR on success
12723 *==========================================================================*/
12724int32_t QCamera3HardwareInterface::extractSceneMode(
12725 const CameraMetadata &frame_settings, uint8_t metaMode,
12726 metadata_buffer_t *hal_metadata)
12727{
12728 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012729 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12730
12731 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12732 LOGD("Ignoring control mode OFF_KEEP_STATE");
12733 return NO_ERROR;
12734 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012735
12736 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12737 camera_metadata_ro_entry entry =
12738 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12739 if (0 == entry.count)
12740 return rc;
12741
12742 uint8_t fwk_sceneMode = entry.data.u8[0];
12743
12744 int val = lookupHalName(SCENE_MODES_MAP,
12745 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12746 fwk_sceneMode);
12747 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012748 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012749 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012750 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012751 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012752
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012753 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12754 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12755 }
12756
12757 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12758 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012759 cam_hdr_param_t hdr_params;
12760 hdr_params.hdr_enable = 1;
12761 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12762 hdr_params.hdr_need_1x = false;
12763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12764 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12765 rc = BAD_VALUE;
12766 }
12767 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012768
Thierry Strudel3d639192016-09-09 11:52:26 -070012769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12770 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12771 rc = BAD_VALUE;
12772 }
12773 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012774
12775 if (mForceHdrSnapshot) {
12776 cam_hdr_param_t hdr_params;
12777 hdr_params.hdr_enable = 1;
12778 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12779 hdr_params.hdr_need_1x = false;
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12781 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12782 rc = BAD_VALUE;
12783 }
12784 }
12785
Thierry Strudel3d639192016-09-09 11:52:26 -070012786 return rc;
12787}
12788
12789/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012790 * FUNCTION : setVideoHdrMode
12791 *
12792 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12793 *
12794 * PARAMETERS :
12795 * @hal_metadata: hal metadata structure
12796 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
12797 *
12798 * RETURN : int32_t type of status, NO_ERROR on success
12799 *==========================================================================*/
12800int32_t QCamera3HardwareInterface::setVideoHdrMode(
12801 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12802{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012803 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12804 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12805 }
12806
12807 LOGE("Invalid Video HDR mode %d!", vhdr);
12808 return BAD_VALUE;
12809}
12810
12811/*===========================================================================
12812 * FUNCTION : setSensorHDR
12813 *
12814 * DESCRIPTION: Enable/disable sensor HDR.
12815 *
12816 * PARAMETERS :
12817 * @hal_metadata: hal metadata structure
12818 * @enable: boolean whether to enable/disable sensor HDR
12819 *
12820 * RETURN : int32_t type of status, NO_ERROR on success
12821 *==========================================================================*/
12822int32_t QCamera3HardwareInterface::setSensorHDR(
12823 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12824{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012825 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012826 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12827
12828 if (enable) {
12829 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12830 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12831 #ifdef _LE_CAMERA_
12832 //Default to staggered HDR for IOT
12833 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12834 #else
12835 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12836 #endif
12837 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12838 }
12839
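// Only apply the requested sensor HDR type when the camera capabilities advertise
// support for it; unknown modes are rejected with BAD_VALUE.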
12840 bool isSupported = false;
12841 switch (sensor_hdr) {
12842 case CAM_SENSOR_HDR_IN_SENSOR:
12843 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12844 CAM_QCOM_FEATURE_SENSOR_HDR) {
12845 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012846 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012847 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012848 break;
12849 case CAM_SENSOR_HDR_ZIGZAG:
12850 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12851 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12852 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012853 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012854 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012855 break;
12856 case CAM_SENSOR_HDR_STAGGERED:
12857 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12858 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12859 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012860 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012861 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012862 break;
12863 case CAM_SENSOR_HDR_OFF:
12864 isSupported = true;
12865 LOGD("Turning off sensor HDR");
12866 break;
12867 default:
12868 LOGE("HDR mode %d not supported", sensor_hdr);
12869 rc = BAD_VALUE;
12870 break;
12871 }
12872
12873 if(isSupported) {
12874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12875 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12876 rc = BAD_VALUE;
12877 } else {
12878 if(!isVideoHdrEnable)
12879 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012880 }
12881 }
12882 return rc;
12883}
12884
12885/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012886 * FUNCTION : needRotationReprocess
12887 *
12888 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12889 *
12890 * PARAMETERS : none
12891 *
12892 * RETURN : true: needed
12893 * false: no need
12894 *==========================================================================*/
12895bool QCamera3HardwareInterface::needRotationReprocess()
12896{
12897 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12898 // pp has the capability to process rotation; use the reprocess path for any requested rotation
12899 LOGH("need do reprocess for rotation");
12900 return true;
12901 }
12902
12903 return false;
12904}
12905
12906/*===========================================================================
12907 * FUNCTION : needReprocess
12908 *
12909 * DESCRIPTION: if reprocess is needed
12910 *
12911 * PARAMETERS : none
12912 *
12913 * RETURN : true: needed
12914 * false: no need
12915 *==========================================================================*/
12916bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12917{
12918 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12919 // TODO: add for ZSL HDR later
12920 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12921 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12922 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12923 return true;
12924 } else {
12925 LOGH("already post processed frame");
12926 return false;
12927 }
12928 }
12929 return needRotationReprocess();
12930}
12931
12932/*===========================================================================
12933 * FUNCTION : needJpegExifRotation
12934 *
12935 * DESCRIPTION: if rotation via JPEG EXIF orientation is needed
12936 *
12937 * PARAMETERS : none
12938 *
12939 * RETURN : true: needed
12940 * false: no need
12941 *==========================================================================*/
12942bool QCamera3HardwareInterface::needJpegExifRotation()
12943{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012944 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012945 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12946 LOGD("Need use Jpeg EXIF Rotation");
12947 return true;
12948 }
12949 return false;
12950}
12951
12952/*===========================================================================
12953 * FUNCTION : addOfflineReprocChannel
12954 *
12955 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12956 * coming from input channel
12957 *
12958 * PARAMETERS :
12959 * @config : reprocess configuration
12960 * @inputChHandle : pointer to the input (source) channel
12961 *
12962 *
12963 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12964 *==========================================================================*/
12965QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12966 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12967{
12968 int32_t rc = NO_ERROR;
12969 QCamera3ReprocessChannel *pChannel = NULL;
12970
12971 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012972 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12973 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012974 if (NULL == pChannel) {
12975 LOGE("no mem for reprocess channel");
12976 return NULL;
12977 }
12978
12979 rc = pChannel->initialize(IS_TYPE_NONE);
12980 if (rc != NO_ERROR) {
12981 LOGE("init reprocess channel failed, ret = %d", rc);
12982 delete pChannel;
12983 return NULL;
12984 }
12985
12986 // pp feature config
12987 cam_pp_feature_config_t pp_config;
12988 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12989
12990 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12991 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12992 & CAM_QCOM_FEATURE_DSDN) {
12993 // Use CPP CDS in case h/w supports it.
12994 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12995 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12996 }
12997 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12998 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12999 }
13000
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013001 if (config.hdr_param.hdr_enable) {
13002 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13003 pp_config.hdr_param = config.hdr_param;
13004 }
13005
13006 if (mForceHdrSnapshot) {
13007 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13008 pp_config.hdr_param.hdr_enable = 1;
13009 pp_config.hdr_param.hdr_need_1x = 0;
13010 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13011 }
13012
Thierry Strudel3d639192016-09-09 11:52:26 -070013013 rc = pChannel->addReprocStreamsFromSource(pp_config,
13014 config,
13015 IS_TYPE_NONE,
13016 mMetadataChannel);
13017
13018 if (rc != NO_ERROR) {
13019 delete pChannel;
13020 return NULL;
13021 }
13022 return pChannel;
13023}
13024
13025/*===========================================================================
13026 * FUNCTION : getMobicatMask
13027 *
13028 * DESCRIPTION: returns mobicat mask
13029 *
13030 * PARAMETERS : none
13031 *
13032 * RETURN : mobicat mask
13033 *
13034 *==========================================================================*/
13035uint8_t QCamera3HardwareInterface::getMobicatMask()
13036{
13037 return m_MobicatMask;
13038}
13039
13040/*===========================================================================
13041 * FUNCTION : setMobicat
13042 *
13043 * DESCRIPTION: set Mobicat on/off.
13044 *
13045 * PARAMETERS :
13046 * @params : none
13047 *
13048 * RETURN : int32_t type of status
13049 * NO_ERROR -- success
13050 * none-zero failure code
13051 *==========================================================================*/
13052int32_t QCamera3HardwareInterface::setMobicat()
13053{
13054 char value [PROPERTY_VALUE_MAX];
13055 property_get("persist.camera.mobicat", value, "0");
13056 int32_t ret = NO_ERROR;
13057 uint8_t enableMobi = (uint8_t)atoi(value);
13058
13059 if (enableMobi) {
13060 tune_cmd_t tune_cmd;
13061 tune_cmd.type = SET_RELOAD_CHROMATIX;
13062 tune_cmd.module = MODULE_ALL;
13063 tune_cmd.value = TRUE;
13064 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13065 CAM_INTF_PARM_SET_VFE_COMMAND,
13066 tune_cmd);
13067
13068 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13069 CAM_INTF_PARM_SET_PP_COMMAND,
13070 tune_cmd);
13071 }
13072 m_MobicatMask = enableMobi;
13073
13074 return ret;
13075}
13076
13077/*===========================================================================
13078* FUNCTION : getLogLevel
13079*
13080* DESCRIPTION: Reads the log level property into a variable
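* (for example "adb shell setprop persist.camera.hal.debug 3" raises
* gCamHal3LogLevel; the properties are re-read on every "dumpsys media.camera"
* call via dump())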
13081*
13082* PARAMETERS :
13083* None
13084*
13085* RETURN :
13086* None
13087*==========================================================================*/
13088void QCamera3HardwareInterface::getLogLevel()
13089{
13090 char prop[PROPERTY_VALUE_MAX];
13091 uint32_t globalLogLevel = 0;
13092
13093 property_get("persist.camera.hal.debug", prop, "0");
13094 int val = atoi(prop);
13095 if (0 <= val) {
13096 gCamHal3LogLevel = (uint32_t)val;
13097 }
13098
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013099 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013100 gKpiDebugLevel = atoi(prop);
13101
13102 property_get("persist.camera.global.debug", prop, "0");
13103 val = atoi(prop);
13104 if (0 <= val) {
13105 globalLogLevel = (uint32_t)val;
13106 }
13107
13108 /* Highest log level among hal.logs and global.logs is selected */
13109 if (gCamHal3LogLevel < globalLogLevel)
13110 gCamHal3LogLevel = globalLogLevel;
13111
13112 return;
13113}
13114
13115/*===========================================================================
13116 * FUNCTION : validateStreamRotations
13117 *
13118 * DESCRIPTION: Check if the rotations requested are supported
13119 *
13120 * PARAMETERS :
13121 * @stream_list : streams to be configured
13122 *
13123 * RETURN : NO_ERROR on success
13124 * -EINVAL on failure
13125 *
13126 *==========================================================================*/
13127int QCamera3HardwareInterface::validateStreamRotations(
13128 camera3_stream_configuration_t *streamList)
13129{
13130 int rc = NO_ERROR;
13131
13132 /*
13133 * Loop through all streams requested in configuration
13134 * Check if unsupported rotations have been requested on any of them
13135 */
13136 for (size_t j = 0; j < streamList->num_streams; j++){
13137 camera3_stream_t *newStream = streamList->streams[j];
13138
13139 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13140 bool isImplDef = (newStream->format ==
13141 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13142 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13143 isImplDef);
13144
13145 if (isRotated && (!isImplDef || isZsl)) {
13146 LOGE("Error: Unsupported rotation of %d requested for stream"
13147 "type:%d and stream format:%d",
13148 newStream->rotation, newStream->stream_type,
13149 newStream->format);
13150 rc = -EINVAL;
13151 break;
13152 }
13153 }
13154
13155 return rc;
13156}
13157
13158/*===========================================================================
13159* FUNCTION : getFlashInfo
13160*
13161* DESCRIPTION: Retrieve information about whether the device has a flash.
13162*
13163* PARAMETERS :
13164* @cameraId : Camera id to query
13165* @hasFlash : Boolean indicating whether there is a flash device
13166* associated with given camera
13167* @flashNode : If a flash device exists, this will be its device node.
13168*
13169* RETURN :
13170* None
13171*==========================================================================*/
13172void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13173 bool& hasFlash,
13174 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13175{
13176 cam_capability_t* camCapability = gCamCapability[cameraId];
13177 if (NULL == camCapability) {
13178 hasFlash = false;
13179 flashNode[0] = '\0';
13180 } else {
13181 hasFlash = camCapability->flash_available;
13182 strlcpy(flashNode,
13183 (char*)camCapability->flash_dev_name,
13184 QCAMERA_MAX_FILEPATH_LENGTH);
13185 }
13186}
13187
13188/*===========================================================================
13189* FUNCTION : getEepromVersionInfo
13190*
13191* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13192*
13193* PARAMETERS : None
13194*
13195* RETURN : string describing EEPROM version
13196* "\0" if no such info available
13197*==========================================================================*/
13198const char *QCamera3HardwareInterface::getEepromVersionInfo()
13199{
13200 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13201}
13202
13203/*===========================================================================
13204* FUNCTION : getLdafCalib
13205*
13206* DESCRIPTION: Retrieve Laser AF calibration data
13207*
13208* PARAMETERS : None
13209*
13210* RETURN : Two uint32_t describing laser AF calibration data
13211* NULL if none is available.
13212*==========================================================================*/
13213const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13214{
13215 if (mLdafCalibExist) {
13216 return &mLdafCalib[0];
13217 } else {
13218 return NULL;
13219 }
13220}
13221
13222/*===========================================================================
13223 * FUNCTION : dynamicUpdateMetaStreamInfo
13224 *
13225 * DESCRIPTION: This function:
13226 * (1) stops all the channels
13227 * (2) returns error on pending requests and buffers
13228 * (3) sends metastream_info in setparams
13229 * (4) starts all channels
13230 * This is useful when sensor has to be restarted to apply any
13231 * settings such as frame rate from a different sensor mode
13232 *
13233 * PARAMETERS : None
13234 *
13235 * RETURN : NO_ERROR on success
13236 * Error codes on failure
13237 *
13238 *==========================================================================*/
13239int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13240{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013241 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013242 int rc = NO_ERROR;
13243
13244 LOGD("E");
13245
13246 rc = stopAllChannels();
13247 if (rc < 0) {
13248 LOGE("stopAllChannels failed");
13249 return rc;
13250 }
13251
13252 rc = notifyErrorForPendingRequests();
13253 if (rc < 0) {
13254 LOGE("notifyErrorForPendingRequests failed");
13255 return rc;
13256 }
13257
13258 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13259 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13260 "Format:%d",
13261 mStreamConfigInfo.type[i],
13262 mStreamConfigInfo.stream_sizes[i].width,
13263 mStreamConfigInfo.stream_sizes[i].height,
13264 mStreamConfigInfo.postprocess_mask[i],
13265 mStreamConfigInfo.format[i]);
13266 }
13267
13268 /* Send meta stream info once again so that ISP can start */
13269 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13270 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13271 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13272 mParameters);
13273 if (rc < 0) {
13274 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13275 }
13276
13277 rc = startAllChannels();
13278 if (rc < 0) {
13279 LOGE("startAllChannels failed");
13280 return rc;
13281 }
13282
13283 LOGD("X");
13284 return rc;
13285}
13286
13287/*===========================================================================
13288 * FUNCTION : stopAllChannels
13289 *
13290 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13291 *
13292 * PARAMETERS : None
13293 *
13294 * RETURN : NO_ERROR on success
13295 * Error codes on failure
13296 *
13297 *==========================================================================*/
13298int32_t QCamera3HardwareInterface::stopAllChannels()
13299{
13300 int32_t rc = NO_ERROR;
13301
13302 LOGD("Stopping all channels");
13303 // Stop the Streams/Channels
13304 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13305 it != mStreamInfo.end(); it++) {
13306 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13307 if (channel) {
13308 channel->stop();
13309 }
13310 (*it)->status = INVALID;
13311 }
13312
13313 if (mSupportChannel) {
13314 mSupportChannel->stop();
13315 }
13316 if (mAnalysisChannel) {
13317 mAnalysisChannel->stop();
13318 }
13319 if (mRawDumpChannel) {
13320 mRawDumpChannel->stop();
13321 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013322 if (mHdrPlusRawSrcChannel) {
13323 mHdrPlusRawSrcChannel->stop();
13324 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013325 if (mMetadataChannel) {
13326 /* If content of mStreamInfo is not 0, there is metadata stream */
13327 mMetadataChannel->stop();
13328 }
13329
13330 LOGD("All channels stopped");
13331 return rc;
13332}
13333
13334/*===========================================================================
13335 * FUNCTION : startAllChannels
13336 *
13337 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13338 *
13339 * PARAMETERS : None
13340 *
13341 * RETURN : NO_ERROR on success
13342 * Error codes on failure
13343 *
13344 *==========================================================================*/
13345int32_t QCamera3HardwareInterface::startAllChannels()
13346{
13347 int32_t rc = NO_ERROR;
13348
13349 LOGD("Start all channels ");
13350 // Start the Streams/Channels
13351 if (mMetadataChannel) {
13352 /* If content of mStreamInfo is not 0, there is metadata stream */
13353 rc = mMetadataChannel->start();
13354 if (rc < 0) {
13355 LOGE("META channel start failed");
13356 return rc;
13357 }
13358 }
13359 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13360 it != mStreamInfo.end(); it++) {
13361 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13362 if (channel) {
13363 rc = channel->start();
13364 if (rc < 0) {
13365 LOGE("channel start failed");
13366 return rc;
13367 }
13368 }
13369 }
13370 if (mAnalysisChannel) {
13371 mAnalysisChannel->start();
13372 }
13373 if (mSupportChannel) {
13374 rc = mSupportChannel->start();
13375 if (rc < 0) {
13376 LOGE("Support channel start failed");
13377 return rc;
13378 }
13379 }
13380 if (mRawDumpChannel) {
13381 rc = mRawDumpChannel->start();
13382 if (rc < 0) {
13383 LOGE("RAW dump channel start failed");
13384 return rc;
13385 }
13386 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013387 if (mHdrPlusRawSrcChannel) {
13388 rc = mHdrPlusRawSrcChannel->start();
13389 if (rc < 0) {
13390 LOGE("HDR+ RAW channel start failed");
13391 return rc;
13392 }
13393 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013394
13395 LOGD("All channels started");
13396 return rc;
13397}
13398
13399/*===========================================================================
13400 * FUNCTION : notifyErrorForPendingRequests
13401 *
13402 * DESCRIPTION: This function sends error for all the pending requests/buffers
13403 *
13404 * PARAMETERS : None
13405 *
13406 * RETURN : Error codes
13407 * NO_ERROR on success
13408 *
13409 *==========================================================================*/
13410int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13411{
13412 int32_t rc = NO_ERROR;
13413 unsigned int frameNum = 0;
13414 camera3_capture_result_t result;
13415 camera3_stream_buffer_t *pStream_Buf = NULL;
13416
13417 memset(&result, 0, sizeof(camera3_capture_result_t));
13418
13419 if (mPendingRequestsList.size() > 0) {
13420 pendingRequestIterator i = mPendingRequestsList.begin();
13421 frameNum = i->frame_number;
13422 } else {
13423 /* There might still be pending buffers even though there are
13424 no pending requests. Setting the frameNum to MAX so that
13425 all the buffers with smaller frame numbers are returned */
13426 frameNum = UINT_MAX;
13427 }
13428
13429 LOGH("Oldest frame num on mPendingRequestsList = %u",
13430 frameNum);
13431
Emilian Peev7650c122017-01-19 08:24:33 -080013432 notifyErrorFoPendingDepthData(mDepthChannel);
13433
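// For frames older than the oldest pending request the result metadata has already been
// delivered, so only ERROR_BUFFER notifications are sent; frames that still have a
// pending request entry get a single ERROR_REQUEST instead.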
Thierry Strudel3d639192016-09-09 11:52:26 -070013434 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13435 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13436
13437 if (req->frame_number < frameNum) {
13438 // Send Error notify to frameworks for each buffer for which
13439 // metadata buffer is already sent
13440 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13441 req->frame_number, req->mPendingBufferList.size());
13442
13443 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13444 if (NULL == pStream_Buf) {
13445 LOGE("No memory for pending buffers array");
13446 return NO_MEMORY;
13447 }
13448 memset(pStream_Buf, 0,
13449 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13450 result.result = NULL;
13451 result.frame_number = req->frame_number;
13452 result.num_output_buffers = req->mPendingBufferList.size();
13453 result.output_buffers = pStream_Buf;
13454
13455 size_t index = 0;
13456 for (auto info = req->mPendingBufferList.begin();
13457 info != req->mPendingBufferList.end(); ) {
13458
13459 camera3_notify_msg_t notify_msg;
13460 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13461 notify_msg.type = CAMERA3_MSG_ERROR;
13462 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13463 notify_msg.message.error.error_stream = info->stream;
13464 notify_msg.message.error.frame_number = req->frame_number;
13465 pStream_Buf[index].acquire_fence = -1;
13466 pStream_Buf[index].release_fence = -1;
13467 pStream_Buf[index].buffer = info->buffer;
13468 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13469 pStream_Buf[index].stream = info->stream;
13470                orchestrateNotify(&notify_msg);
13471                index++;
13472 // Remove buffer from list
13473 info = req->mPendingBufferList.erase(info);
13474 }
13475
13476 // Remove this request from Map
13477 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13478 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13479 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13480
13481            orchestrateResult(&result);
13482
13483 delete [] pStream_Buf;
13484 } else {
13485
13486 // Go through the pending requests info and send error request to framework
13487 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13488
13489 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13490
13491 // Send error notify to frameworks
13492 camera3_notify_msg_t notify_msg;
13493 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13494 notify_msg.type = CAMERA3_MSG_ERROR;
13495 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13496 notify_msg.message.error.error_stream = NULL;
13497 notify_msg.message.error.frame_number = req->frame_number;
13498            orchestrateNotify(&notify_msg);
13499
13500 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13501 if (NULL == pStream_Buf) {
13502 LOGE("No memory for pending buffers array");
13503 return NO_MEMORY;
13504 }
13505 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13506
13507 result.result = NULL;
13508 result.frame_number = req->frame_number;
13509 result.input_buffer = i->input_buffer;
13510 result.num_output_buffers = req->mPendingBufferList.size();
13511 result.output_buffers = pStream_Buf;
13512
13513 size_t index = 0;
13514 for (auto info = req->mPendingBufferList.begin();
13515 info != req->mPendingBufferList.end(); ) {
13516 pStream_Buf[index].acquire_fence = -1;
13517 pStream_Buf[index].release_fence = -1;
13518 pStream_Buf[index].buffer = info->buffer;
13519 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13520 pStream_Buf[index].stream = info->stream;
13521 index++;
13522 // Remove buffer from list
13523 info = req->mPendingBufferList.erase(info);
13524 }
13525
13526 // Remove this request from Map
13527 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13528 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13529 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13530
13531            orchestrateResult(&result);
13532            delete [] pStream_Buf;
13533 i = erasePendingRequest(i);
13534 }
13535 }
13536
13537 /* Reset pending frame Drop list and requests list */
13538 mPendingFrameDropList.clear();
13539
13540 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13541 req.mPendingBufferList.clear();
13542 }
13543 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13544    LOGH("Cleared all the pending buffers ");
13545
13546 return rc;
13547}
13548
13549bool QCamera3HardwareInterface::isOnEncoder(
13550 const cam_dimension_t max_viewfinder_size,
13551 uint32_t width, uint32_t height)
13552{
13553    return ((width > (uint32_t)max_viewfinder_size.width) ||
13554 (height > (uint32_t)max_viewfinder_size.height) ||
13555 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13556 (height > (uint32_t)VIDEO_4K_HEIGHT));
13557}
13558
13559/*===========================================================================
13560 * FUNCTION : setBundleInfo
13561 *
13562 * DESCRIPTION: Set bundle info for all streams that are bundled.
13563 *
13564 * PARAMETERS : None
13565 *
13566 * RETURN : NO_ERROR on success
13567 * Error codes on failure
13568 *==========================================================================*/
13569int32_t QCamera3HardwareInterface::setBundleInfo()
13570{
13571 int32_t rc = NO_ERROR;
13572
13573 if (mChannelHandle) {
13574 cam_bundle_config_t bundleInfo;
13575 memset(&bundleInfo, 0, sizeof(bundleInfo));
13576 rc = mCameraHandle->ops->get_bundle_info(
13577 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13578 if (rc != NO_ERROR) {
13579 LOGE("get_bundle_info failed");
13580 return rc;
13581 }
13582 if (mAnalysisChannel) {
13583 mAnalysisChannel->setBundleInfo(bundleInfo);
13584 }
13585 if (mSupportChannel) {
13586 mSupportChannel->setBundleInfo(bundleInfo);
13587 }
13588 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13589 it != mStreamInfo.end(); it++) {
13590 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13591 channel->setBundleInfo(bundleInfo);
13592 }
13593 if (mRawDumpChannel) {
13594 mRawDumpChannel->setBundleInfo(bundleInfo);
13595 }
13596        if (mHdrPlusRawSrcChannel) {
13597 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13598 }
13599    }
13600
13601 return rc;
13602}
13603
13604/*===========================================================================
13605 * FUNCTION : setInstantAEC
13606 *
13607 * DESCRIPTION: Set Instant AEC related params.
13608 *
13609 * PARAMETERS :
13610 * @meta: CameraMetadata reference
13611 *
13612 * RETURN : NO_ERROR on success
13613 * Error codes on failure
13614 *==========================================================================*/
13615int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13616{
13617 int32_t rc = NO_ERROR;
13618 uint8_t val = 0;
13619 char prop[PROPERTY_VALUE_MAX];
13620
13621 // First try to configure instant AEC from framework metadata
13622 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13623 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13624 }
13625
13626 // If framework did not set this value, try to read from set prop.
13627 if (val == 0) {
13628 memset(prop, 0, sizeof(prop));
13629 property_get("persist.camera.instant.aec", prop, "0");
13630 val = (uint8_t)atoi(prop);
13631 }
13632
13633 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
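    // Precedence as implemented above: the QCAMERA3_INSTANT_AEC_MODE vendor tag
    // from the framework wins; if it is absent or 0, the persist.camera.instant.aec
    // property is used as a fallback. The value is then validated below against the
    // [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) range before it is
    // applied.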
13634 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13635 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13636 mInstantAEC = val;
13637 mInstantAECSettledFrameNumber = 0;
13638 mInstantAecFrameIdxCount = 0;
13639 LOGH("instantAEC value set %d",val);
13640 if (mInstantAEC) {
13641 memset(prop, 0, sizeof(prop));
13642 property_get("persist.camera.ae.instant.bound", prop, "10");
13643 int32_t aec_frame_skip_cnt = atoi(prop);
13644 if (aec_frame_skip_cnt >= 0) {
13645 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13646 } else {
13647 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13648 rc = BAD_VALUE;
13649 }
13650 }
13651 } else {
13652 LOGE("Bad instant aec value set %d", val);
13653 rc = BAD_VALUE;
13654 }
13655 return rc;
13656}
13657
13658/*===========================================================================
13659 * FUNCTION : get_num_overall_buffers
13660 *
13661 * DESCRIPTION: Return the total number of pending buffers across all requests.
13662 *
13663 * PARAMETERS : None
13664 *
13665 * RETURN : Number of overall pending buffers
13666 *
13667 *==========================================================================*/
13668uint32_t PendingBuffersMap::get_num_overall_buffers()
13669{
13670 uint32_t sum_buffers = 0;
13671 for (auto &req : mPendingBuffersInRequest) {
13672 sum_buffers += req.mPendingBufferList.size();
13673 }
13674 return sum_buffers;
13675}
13676
13677/*===========================================================================
13678 * FUNCTION : removeBuf
13679 *
13680 * DESCRIPTION: Remove a matching buffer from the tracker.
13681 *
13682 * PARAMETERS : @buffer: image buffer for the callback
13683 *
13684 * RETURN : None
13685 *
13686 *==========================================================================*/
13687void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13688{
13689 bool buffer_found = false;
13690 for (auto req = mPendingBuffersInRequest.begin();
13691 req != mPendingBuffersInRequest.end(); req++) {
13692 for (auto k = req->mPendingBufferList.begin();
13693 k != req->mPendingBufferList.end(); k++ ) {
13694 if (k->buffer == buffer) {
13695 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13696 req->frame_number, buffer);
13697 k = req->mPendingBufferList.erase(k);
13698 if (req->mPendingBufferList.empty()) {
13699 // Remove this request from Map
13700 req = mPendingBuffersInRequest.erase(req);
13701 }
13702 buffer_found = true;
13703 break;
13704 }
13705 }
13706 if (buffer_found) {
13707 break;
13708 }
13709 }
13710 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13711 get_num_overall_buffers());
13712}
13713
13714/*===========================================================================
13715 * FUNCTION : getBufErrStatus
13716 *
13717 * DESCRIPTION: get buffer error status
13718 *
13719 * PARAMETERS : @buffer: buffer handle
13720 *
13721 * RETURN : Error status
13722 *
13723 *==========================================================================*/
13724int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13725{
13726 for (auto& req : mPendingBuffersInRequest) {
13727 for (auto& k : req.mPendingBufferList) {
13728 if (k.buffer == buffer)
13729 return k.bufStatus;
13730 }
13731 }
13732 return CAMERA3_BUFFER_STATUS_OK;
13733}
13734
13735/*===========================================================================
13736 * FUNCTION : setPAAFSupport
13737 *
13738 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13739 * feature mask according to stream type and filter
13740 * arrangement
13741 *
13742 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13743 * @stream_type: stream type
13744 * @filter_arrangement: filter arrangement
13745 *
13746 * RETURN : None
13747 *==========================================================================*/
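// Summary of the logic below: for Bayer sensors, PAAF is enabled on preview,
// analysis and video streams unless the QTI PPEISCORE feature is already set
// (in which case EIS post-processing takes precedence); for mono (Y-only)
// sensors, PAAF is enabled only on the analysis stream.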
13748void QCamera3HardwareInterface::setPAAFSupport(
13749 cam_feature_mask_t& feature_mask,
13750 cam_stream_type_t stream_type,
13751 cam_color_filter_arrangement_t filter_arrangement)
13752{
13753    switch (filter_arrangement) {
13754 case CAM_FILTER_ARRANGEMENT_RGGB:
13755 case CAM_FILTER_ARRANGEMENT_GRBG:
13756 case CAM_FILTER_ARRANGEMENT_GBRG:
13757 case CAM_FILTER_ARRANGEMENT_BGGR:
13758        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13759 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
13760                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13761            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
13762 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13763        }
13764 break;
13765 case CAM_FILTER_ARRANGEMENT_Y:
13766 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13767 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13768 }
13769 break;
13770 default:
13771 break;
13772 }
13773    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13774 feature_mask, stream_type, filter_arrangement);
13775
13776
13777}
13778
13779/*===========================================================================
13780* FUNCTION : getSensorMountAngle
13781*
13782* DESCRIPTION: Retrieve sensor mount angle
13783*
13784* PARAMETERS : None
13785*
13786* RETURN : sensor mount angle in uint32_t
13787*==========================================================================*/
13788uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13789{
13790 return gCamCapability[mCameraId]->sensor_mount_angle;
13791}
13792
13793/*===========================================================================
13794* FUNCTION : getRelatedCalibrationData
13795*
13796* DESCRIPTION: Retrieve related system calibration data
13797*
13798* PARAMETERS : None
13799*
13800* RETURN : Pointer of related system calibration data
13801*==========================================================================*/
13802const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13803{
13804 return (const cam_related_system_calibration_data_t *)
13805 &(gCamCapability[mCameraId]->related_cam_calibration);
13806}
13807
13808/*===========================================================================
13809 * FUNCTION : is60HzZone
13810 *
13811 * DESCRIPTION: Check whether the device is in a region with 60Hz mains electricity frequency
13812 *
13813 * PARAMETERS : None
13814 *
13815 * RETURN : True if in 60Hz zone, False otherwise
13816 *==========================================================================*/
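// Note on the heuristic below: the decision is based purely on the local time
// zone offset. Offsets at or west of UTC-2, offsets at or east of UTC+8, and
// the fallback case where localtime_r fails are all treated as 60Hz regions;
// everything in between is assumed to be 50Hz. This is a coarse approximation,
// not a lookup of the actual mains frequency.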
13817bool QCamera3HardwareInterface::is60HzZone()
13818{
13819 time_t t = time(NULL);
13820 struct tm lt;
13821
13822 struct tm* r = localtime_r(&t, &lt);
13823
13824 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13825 return true;
13826 else
13827 return false;
13828}
13829
13830/*===========================================================================
13831 * FUNCTION : adjustBlackLevelForCFA
13832 *
13833 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order
13834 *              of the sensor's Bayer CFA (Color Filter Array).
13835 *
13836 * PARAMETERS : @input: black level pattern in the order of RGGB
13837 * @output: black level pattern in the order of CFA
13838 * @color_arrangement: CFA color arrangement
13839 *
13840 * RETURN : None
13841 *==========================================================================*/
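// Worked example, derived from the mapping below: for a GRBG sensor, an
// RGGB-ordered input {R, Gr, Gb, B} becomes {Gr, R, B, Gb}, i.e. the four
// values are rearranged so that output[i] corresponds to the i-th cell of the
// sensor's 2x2 CFA tile.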
13842template<typename T>
13843void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13844 T input[BLACK_LEVEL_PATTERN_CNT],
13845 T output[BLACK_LEVEL_PATTERN_CNT],
13846 cam_color_filter_arrangement_t color_arrangement)
13847{
13848 switch (color_arrangement) {
13849 case CAM_FILTER_ARRANGEMENT_GRBG:
13850 output[0] = input[1];
13851 output[1] = input[0];
13852 output[2] = input[3];
13853 output[3] = input[2];
13854 break;
13855 case CAM_FILTER_ARRANGEMENT_GBRG:
13856 output[0] = input[2];
13857 output[1] = input[3];
13858 output[2] = input[0];
13859 output[3] = input[1];
13860 break;
13861 case CAM_FILTER_ARRANGEMENT_BGGR:
13862 output[0] = input[3];
13863 output[1] = input[2];
13864 output[2] = input[1];
13865 output[3] = input[0];
13866 break;
13867 case CAM_FILTER_ARRANGEMENT_RGGB:
13868 output[0] = input[0];
13869 output[1] = input[1];
13870 output[2] = input[2];
13871 output[3] = input[3];
13872 break;
13873 default:
13874 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13875 break;
13876 }
13877}
13878
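// The helper below copies JPEG-related settings (GPS coordinates, processing
// method, timestamp, orientation, quality, thumbnail quality and size) and the
// capture intent from the original request settings into the HDR+ result
// metadata, so the result reflects the app-requested JPEG parameters rather
// than those of the ZSL buffer.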
13879void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13880 CameraMetadata &resultMetadata,
13881 std::shared_ptr<metadata_buffer_t> settings)
13882{
13883 if (settings == nullptr) {
13884 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13885 return;
13886 }
13887
13888 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13889 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13890 }
13891
13892 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13893 String8 str((const char *)gps_methods);
13894 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13895 }
13896
13897 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13898 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13899 }
13900
13901 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13902 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13903 }
13904
13905 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13906 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13907 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13908 }
13909
13910 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13911 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13912 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13913 }
13914
13915 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13916 int32_t fwk_thumb_size[2];
13917 fwk_thumb_size[0] = thumb_size->width;
13918 fwk_thumb_size[1] = thumb_size->height;
13919 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13920 }
13921
13922 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13923 uint8_t fwk_intent = intent[0];
13924 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13925 }
13926}
13927
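// The function below decides whether a capture request qualifies for HDR+
// processing: noise reduction and edge modes must both be HIGH_QUALITY and the
// request must have exactly one BLOB (JPEG) output. If it qualifies, a YUV
// buffer is borrowed from the pic channel and a capture request is submitted
// to the HDR+ service; otherwise false is returned and the caller handles the
// request through the regular path.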
13928bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13929 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13930{
13931 if (hdrPlusRequest == nullptr) return false;
13932
13933 // Check noise reduction mode is high quality.
13934 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13935 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13936 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
13937        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13938 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
13939        return false;
13940 }
13941
13942 // Check edge mode is high quality.
13943 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13944 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13945 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13946 return false;
13947 }
13948
13949 if (request.num_output_buffers != 1 ||
13950 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13951 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
13952        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13953 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13954                request.output_buffers[i].stream->width,
13955                request.output_buffers[i].stream->height,
13956                request.output_buffers[i].stream->format);
13957 }
13958        return false;
13959 }
13960
13961 // Get a YUV buffer from pic channel.
13962 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13963 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13964 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13965 if (res != OK) {
13966 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13967 __FUNCTION__, strerror(-res), res);
13968 return false;
13969 }
13970
13971 pbcamera::StreamBuffer buffer;
13972 buffer.streamId = kPbYuvOutputStreamId;
13973    buffer.dmaBufFd = yuvBuffer->fd;
13974    buffer.data = yuvBuffer->buffer;
13975 buffer.dataSize = yuvBuffer->frame_len;
13976
13977 pbcamera::CaptureRequest pbRequest;
13978 pbRequest.id = request.frame_number;
13979 pbRequest.outputBuffers.push_back(buffer);
13980
13981 // Submit an HDR+ capture request to HDR+ service.
13982    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
13983    if (res != OK) {
13984 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13985 strerror(-res), res);
13986 return false;
13987 }
13988
13989 hdrPlusRequest->yuvBuffer = yuvBuffer;
13990 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13991
13992 return true;
13993}
13994
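// The sequence below enables HDR+ mode: connect to the HDR+ client, push the
// camera's static metadata, then configure the HDR+ input/output streams. Any
// failure after a successful connect disconnects the client again before the
// error is propagated, leaving the HAL in a consistent state.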
13995status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13996{
13997    if (gHdrPlusClient == nullptr) {
13998        ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13999 return -ENODEV;
14000 }
14001
14002 // Connect to HDR+ service
14003    status_t res = gHdrPlusClient->connect(this);
14004    if (res != OK) {
14005 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
14006 strerror(-res), res);
14007 return res;
14008 }
14009
14010 // Set static metadata.
14011    res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14012    if (res != OK) {
14013 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
14014 strerror(-res), res);
14015        gHdrPlusClient->disconnect();
14016        return res;
14017 }
14018
14019 // Configure stream for HDR+.
14020 res = configureHdrPlusStreamsLocked();
14021 if (res != OK) {
14022 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14023        gHdrPlusClient->disconnect();
14024        return res;
14025 }
14026
14027 mHdrPlusModeEnabled = true;
14028 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14029
14030 return OK;
14031}
14032
14033void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14034{
14035 // Disconnect from HDR+ service.
14036    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14037        gHdrPlusClient->disconnect();
14038    }
14039
14040 mHdrPlusModeEnabled = false;
14041 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14042}
14043
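// The configuration below describes one RAW10 input for the HDR+ client
// (either buffers supplied by the HAL through mHdrPlusRawSrcChannel, or the
// sensor streaming directly over MIPI using the current sensor mode) plus a
// YUV output backed by the pic channel, which is later JPEG-encoded.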
14044status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
14045{
14046 pbcamera::InputConfiguration inputConfig;
14047 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14048 status_t res = OK;
14049
14050 // Configure HDR+ client streams.
14051 // Get input config.
14052 if (mHdrPlusRawSrcChannel) {
14053 // HDR+ input buffers will be provided by HAL.
14054 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14055 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14056 if (res != OK) {
14057 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14058 __FUNCTION__, strerror(-res), res);
14059 return res;
14060 }
14061
14062 inputConfig.isSensorInput = false;
14063 } else {
14064 // Sensor MIPI will send data to Easel.
14065 inputConfig.isSensorInput = true;
14066        inputConfig.sensorMode.cameraId = mCameraId;
14067        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14068 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14069 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14070 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14071 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14072 if (mSensorModeInfo.num_raw_bits != 10) {
14073 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14074 mSensorModeInfo.num_raw_bits);
14075 return BAD_VALUE;
14076 }
14077
14078 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14079    }
14080
14081 // Get output configurations.
14082 // Easel may need to output RAW16 buffers if mRawChannel was created.
14083    // TODO: handle RAW16 outputs.
14084
14085 // Easel may need to output YUV output buffers if mPictureChannel was created.
14086 pbcamera::StreamConfiguration yuvOutputConfig;
14087 if (mPictureChannel != nullptr) {
14088 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14089 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14090 if (res != OK) {
14091 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14092 __FUNCTION__, strerror(-res), res);
14093
14094 return res;
14095 }
14096
14097 outputStreamConfigs.push_back(yuvOutputConfig);
14098 }
14099
14100 // TODO: consider other channels for YUV output buffers.
14101
14102    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14103    if (res != OK) {
14104        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14105 strerror(-res), res);
14106 return res;
14107 }
14108
14109 return OK;
14110}
14111
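// Flow of the HDR+ result callback below: look up the pending HDR+ request,
// merge the original request's JPEG settings into the result metadata,
// optionally dump the YUV output to a .ppm file when
// persist.camera.hdrplus.dump_yuv is set, hand the YUV buffer back to the pic
// channel for JPEG encoding (or return it unencoded if metadata translation
// fails), deliver the result metadata to the framework, and finally erase the
// pending entry.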
14112void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14113 const camera_metadata_t &resultMetadata) {
14114 if (result != nullptr) {
14115 if (result->outputBuffers.size() != 1) {
14116 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14117 result->outputBuffers.size());
14118 return;
14119 }
14120
14121 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14122 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14123 result->outputBuffers[0].streamId);
14124 return;
14125 }
14126
14127        // Find the pending HDR+ request.
14128        HdrPlusPendingRequest pendingRequest;
14129 {
14130 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14131 auto req = mHdrPlusPendingRequests.find(result->requestId);
14132 pendingRequest = req->second;
14133 }
14134
14135        // Update the result metadata with the settings of the HDR+ still capture request because
14136 // the result metadata belongs to a ZSL buffer.
14137 CameraMetadata metadata;
14138 metadata = &resultMetadata;
14139 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14140 camera_metadata_t* updatedResultMetadata = metadata.release();
14141
14142 QCamera3PicChannel *picChannel =
14143 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14144
14145        // Check if dumping HDR+ YUV output is enabled.
14146 char prop[PROPERTY_VALUE_MAX];
14147 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14148 bool dumpYuvOutput = atoi(prop);
14149
14150 if (dumpYuvOutput) {
14151            // Dump yuv buffer to a ppm file.
14152 pbcamera::StreamConfiguration outputConfig;
14153 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14154 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14155 if (rc == OK) {
14156 char buf[FILENAME_MAX] = {};
14157 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14158 result->requestId, result->outputBuffers[0].streamId,
14159 outputConfig.image.width, outputConfig.image.height);
14160
14161 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14162 } else {
14163 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14164 __FUNCTION__, strerror(-rc), rc);
14165 }
14166 }
14167
14168        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14169 auto halMetadata = std::make_shared<metadata_buffer_t>();
14170 clear_metadata_buffer(halMetadata.get());
14171
14172 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14173 // encoding.
14174 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14175 halStreamId, /*minFrameDuration*/0);
14176 if (res == OK) {
14177 // Return the buffer to pic channel for encoding.
14178 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14179 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14180 halMetadata);
14181 } else {
14182 // Return the buffer without encoding.
14183 // TODO: This should not happen but we may want to report an error buffer to camera
14184 // service.
14185 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14186 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14187 strerror(-res), res);
14188 }
14189
14190 // Send HDR+ metadata to framework.
14191 {
14192 pthread_mutex_lock(&mMutex);
14193
14194 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14195 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14196 pthread_mutex_unlock(&mMutex);
14197 }
14198
14199 // Remove the HDR+ pending request.
14200 {
14201 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14202 auto req = mHdrPlusPendingRequests.find(result->requestId);
14203 mHdrPlusPendingRequests.erase(req);
14204 }
14205 }
14206}
14207
14208void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14209 // TODO: Handle HDR+ capture failures and send the failure to framework.
14210 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14211 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14212
14213 // Return the buffer to pic channel.
14214 QCamera3PicChannel *picChannel =
14215 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14216 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14217
14218 mHdrPlusPendingRequests.erase(pendingRequest);
14219}
14220
14221}; //end namespace qcamera