/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
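// Illustrative relationship between the HFR constants above (assumption, noted
// here for reference only): the batch size is expected to track
// HFR fps / PREVIEW_FPS_FOR_HFR, e.g. 240 / 30 = 8, capped at MAX_HFR_BATCH_SIZE.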
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
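// Illustrative usage of METADATA_MAP_SIZE (example only, using one of the
// QCameraMap tables defined below):
//     size_t count = METADATA_MAP_SIZE(EFFECT_MODES_MAP);
// i.e. the number of entries in a statically sized mapping table.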

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
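// Illustrative layout (assumption, based on the Android face-landmark metadata
// convention): each detected face reports a flat int32 array indexed by the
// defines above, e.g.
//     int32_t landmarks[TOTAL_LANDMARK_INDICES];
//     landmarks[LEFT_EYE_X] = leftEye.x;   // leftEye is a hypothetical value
//     landmarks[LEFT_EYE_Y] = leftEye.y;
// and similarly for the right eye and mouth, per ANDROID_STATISTICS_FACE_LANDMARKS.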

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order of this list matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for a HAL value that maps to multiple
 * Android values the first entry found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
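// Example of the order dependence described above: CAM_AWB_D50 appears for
// D50, DAYLIGHT and FINE_WEATHER, so a HAL-to-Android lookup of CAM_AWB_D50
// resolves to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, the first match.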

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
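// Usage example (as seen later in this file): the tag groups related events so
// a profiling timeline can be reconstructed from the log, e.g.
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// The call is a no-op unless gEaselProfilingEnabled is set.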

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module callback functions from the camera framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs.
    // TBD: verify whether this hardcoding is still needed, i.e. whether mctl already fills this with 3.
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configurations requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in the configuration.
     * Check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, the depth cloud size should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
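                // Worked example (hypothetical PD dimensions, for illustration
                // only): a 504x380 PD map gives (504 * 380 * 2) / 16 = 23940
                // samples, so the matching depth blob stream must be 23940x1.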
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set by the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the
                 * full array size, so the check is kept lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1322
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001323/*===========================================================================
1324 * FUNCTION : validateUsageFlags
1325 *
1326 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1327 *
1328 * PARAMETERS :
1329 * @stream_list : streams to be configured
1330 *
1331 * RETURN :
1332 * NO_ERROR if the usage flags are supported
1333 * error code if usage flags are not supported
1334 *
1335 *==========================================================================*/
1336int QCamera3HardwareInterface::validateUsageFlags(
1337 const camera3_stream_configuration_t* streamList)
1338{
1339 for (size_t j = 0; j < streamList->num_streams; j++) {
1340 const camera3_stream_t *newStream = streamList->streams[j];
1341
1342 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1343 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1344 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1345 continue;
1346 }
1347
Jason Leec4cf5032017-05-24 18:31:41 -07001348 // Here we only care whether it's EIS3 or not
1349 char is_type_value[PROPERTY_VALUE_MAX];
1350 property_get("persist.camera.is_type", is_type_value, "4");
1351 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1352 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1353 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1354 isType = IS_TYPE_NONE;
1355
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001356 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1357 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1358 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1359 bool forcePreviewUBWC = true;
1360 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1361 forcePreviewUBWC = false;
1362 }
1363 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001364 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001365 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001366 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001367 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001368 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001369
1370 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1371 // So color spaces will always match.
1372
1373 // Check whether underlying formats of shared streams match.
1374 if (isVideo && isPreview && videoFormat != previewFormat) {
1375 LOGE("Combined video and preview usage flag is not supported");
1376 return -EINVAL;
1377 }
1378 if (isPreview && isZSL && previewFormat != zslFormat) {
1379 LOGE("Combined preview and zsl usage flag is not supported");
1380 return -EINVAL;
1381 }
1382 if (isVideo && isZSL && videoFormat != zslFormat) {
1383 LOGE("Combined video and zsl usage flag is not supported");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
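/*
 * Illustrative failure case (hypothetical): a single IMPLEMENTATION_DEFINED
 * output stream flagged with both PREVIEW and VIDEO_ENCODER usage. If the
 * video path resolves to a different default format than the preview path
 * (e.g. UBWC vs. linear when video UBWC is enabled), the shared stream cannot
 * satisfy both consumers and validateUsageFlags() returns -EINVAL.
 */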
1389
1390/*===========================================================================
1391 * FUNCTION : validateUsageFlagsForEis
1392 *
1393 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN :
1399 * NO_ERROR if the usage flags are supported
1400 * error code if usage flags are not supported
1401 *
1402 *==========================================================================*/
1403int QCamera3HardwareInterface::validateUsageFlagsForEis(
1404 const camera3_stream_configuration_t* streamList)
1405{
1406 for (size_t j = 0; j < streamList->num_streams; j++) {
1407 const camera3_stream_t *newStream = streamList->streams[j];
1408
1409 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1410 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1411
1412         // Because EIS is "hard-coded" for certain use cases, and the current
1413         // implementation doesn't support sharing preview and video on the same
1414         // stream, return failure if EIS is forced on.
1415 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1416 LOGE("Combined video and preview usage flag is not supported due to EIS");
1417 return -EINVAL;
1418 }
1419 }
1420 return NO_ERROR;
1421}
1422
Thierry Strudel3d639192016-09-09 11:52:26 -07001423/*==============================================================================
1424 * FUNCTION : isSupportChannelNeeded
1425 *
1426 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1427 *
1428 * PARAMETERS :
1429 * @stream_list : streams to be configured
1430 * @stream_config_info : the config info for streams to be configured
1431 *
1432 * RETURN : Boolean true/false decision
1433 *
1434 *==========================================================================*/
1435bool QCamera3HardwareInterface::isSupportChannelNeeded(
1436 camera3_stream_configuration_t *streamList,
1437 cam_stream_size_info_t stream_config_info)
1438{
1439 uint32_t i;
1440 bool pprocRequested = false;
1441 /* Check for conditions where PProc pipeline does not have any streams*/
1442 for (i = 0; i < stream_config_info.num_streams; i++) {
1443 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1444 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1445 pprocRequested = true;
1446 break;
1447 }
1448 }
1449
1450 if (pprocRequested == false )
1451 return true;
1452
1453 /* Dummy stream needed if only raw or jpeg streams present */
1454 for (i = 0; i < streamList->num_streams; i++) {
1455 switch(streamList->streams[i]->format) {
1456 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1457 case HAL_PIXEL_FORMAT_RAW10:
1458 case HAL_PIXEL_FORMAT_RAW16:
1459 case HAL_PIXEL_FORMAT_BLOB:
1460 break;
1461 default:
1462 return false;
1463 }
1464 }
1465 return true;
1466}
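/*
 * Illustrative example: a configuration containing only a BLOB (JPEG) stream
 * and a RAW16 stream leaves the post-processing pipeline with no processed
 * stream to run on, so isSupportChannelNeeded() returns true, signalling that
 * the dummy support channel should be added.
 */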
1467
1468/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001470 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001471 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001472 *
1473 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001475 *
1476 * RETURN : int32_t type of status
1477 * NO_ERROR -- success
1478 * non-zero failure code
1479 *
1480 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001482{
1483 int32_t rc = NO_ERROR;
1484
1485 cam_dimension_t max_dim = {0, 0};
1486 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1487 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1488 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1489 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1490 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1491 }
1492
1493 clear_metadata_buffer(mParameters);
1494
1495 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1496 max_dim);
1497 if (rc != NO_ERROR) {
1498 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1499 return rc;
1500 }
1501
1502 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1503 if (rc != NO_ERROR) {
1504 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1505 return rc;
1506 }
1507
1508 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001509 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001510
1511 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1512 mParameters);
1513 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001514 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001515 return rc;
1516 }
1517
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001518 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001519 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1520 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1521 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1522 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1523 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001524
1525 return rc;
1526}
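/*
 * Minimal usage sketch (illustrative only, not called from here):
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       LOGD("sensor mode: %dx%d active array, op_pixel_clk %u",
 *               modeInfo.active_array_size.width,
 *               modeInfo.active_array_size.height,
 *               modeInfo.op_pixel_clk);
 *   }
 *
 * The query is only meaningful after the stream configuration is known, since
 * CAM_INTF_PARM_MAX_DIMENSION is derived from mStreamConfigInfo above.
 */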
1527
1528/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001529 * FUNCTION : getCurrentSensorModeInfo
1530 *
1531 * DESCRIPTION: Get sensor mode information that is currently selected.
1532 *
1533 * PARAMETERS :
1534 * @sensorModeInfo : sensor mode information (output)
1535 *
1536 * RETURN : int32_t type of status
1537 * NO_ERROR -- success
1538 * non-zero failure code
1539 *
1540 *==========================================================================*/
1541int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1542{
1543 int32_t rc = NO_ERROR;
1544
1545 clear_metadata_buffer(mParameters);
1546 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1547
1548 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1549 mParameters);
1550 if (rc != NO_ERROR) {
1551 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1552 return rc;
1553 }
1554
1555 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1556 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1557 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1558 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1559 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1560 sensorModeInfo.num_raw_bits);
1561
1562 return rc;
1563}
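// Note: getCurrentSensorModeInfo() above differs from getSensorModeInfo() in
// that it does not program CAM_INTF_PARM_MAX_DIMENSION first; it simply
// reports the mode the sensor is currently running with.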
1564
1565/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001566 * FUNCTION : addToPPFeatureMask
1567 *
1568 * DESCRIPTION: add additional features to pp feature mask based on
1569 * stream type and usecase
1570 *
1571 * PARAMETERS :
1572 * @stream_format : stream type for feature mask
1573 * @stream_idx : stream idx within postprocess_mask list to change
1574 *
1575 * RETURN : NULL
1576 *
1577 *==========================================================================*/
1578void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1579 uint32_t stream_idx)
1580{
1581 char feature_mask_value[PROPERTY_VALUE_MAX];
1582 cam_feature_mask_t feature_mask;
1583 int args_converted;
1584 int property_len;
1585
1586 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001587#ifdef _LE_CAMERA_
1588 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1589 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1590 property_len = property_get("persist.camera.hal3.feature",
1591 feature_mask_value, swtnr_feature_mask_value);
1592#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 property_len = property_get("persist.camera.hal3.feature",
1594 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001595#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001596 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1597 (feature_mask_value[1] == 'x')) {
1598 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1599 } else {
1600 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1601 }
1602 if (1 != args_converted) {
1603 feature_mask = 0;
1604 LOGE("Wrong feature mask %s", feature_mask_value);
1605 return;
1606 }
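    /* Example override (hypothetical value): the mask can be supplied in hex
     * or decimal, e.g.
     *   adb shell setprop persist.camera.hal3.feature 0x4000
     * A value that fails to parse is logged as an error and the postprocess
     * mask is left unchanged. */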
1607
1608 switch (stream_format) {
1609 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1610 /* Add LLVD to pp feature mask only if video hint is enabled */
1611 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1612 mStreamConfigInfo.postprocess_mask[stream_idx]
1613 |= CAM_QTI_FEATURE_SW_TNR;
1614 LOGH("Added SW TNR to pp feature mask");
1615 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1616 mStreamConfigInfo.postprocess_mask[stream_idx]
1617 |= CAM_QCOM_FEATURE_LLVD;
1618 LOGH("Added LLVD SeeMore to pp feature mask");
1619 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001620 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1621 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1622 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1623 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001624 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1625 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1627 CAM_QTI_FEATURE_BINNING_CORRECTION;
1628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001629 break;
1630 }
1631 default:
1632 break;
1633 }
1634 LOGD("PP feature mask %llx",
1635 mStreamConfigInfo.postprocess_mask[stream_idx]);
1636}
1637
1638/*==============================================================================
1639 * FUNCTION : updateFpsInPreviewBuffer
1640 *
1641 * DESCRIPTION: update FPS information in preview buffer.
1642 *
1643 * PARAMETERS :
1644 * @metadata : pointer to metadata buffer
1645 * @frame_number: frame_number to look for in pending buffer list
1646 *
1647 * RETURN : None
1648 *
1649 *==========================================================================*/
1650void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1651 uint32_t frame_number)
1652{
1653 // Mark all pending buffers for this particular request
1654 // with corresponding framerate information
1655 for (List<PendingBuffersInRequest>::iterator req =
1656 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1657 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1658 for(List<PendingBufferInfo>::iterator j =
1659 req->mPendingBufferList.begin();
1660 j != req->mPendingBufferList.end(); j++) {
1661 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1662 if ((req->frame_number == frame_number) &&
1663 (channel->getStreamTypeMask() &
1664 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1665 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1666 CAM_INTF_PARM_FPS_RANGE, metadata) {
1667 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1668 struct private_handle_t *priv_handle =
1669 (struct private_handle_t *)(*(j->buffer));
1670 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1671 }
1672 }
1673 }
1674 }
1675}
1676
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001677/*==============================================================================
1678 * FUNCTION : updateTimeStampInPendingBuffers
1679 *
1680 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1681 * of a frame number
1682 *
1683 * PARAMETERS :
1684 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1685 * @timestamp : timestamp to be set
1686 *
1687 * RETURN : None
1688 *
1689 *==========================================================================*/
1690void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1691 uint32_t frameNumber, nsecs_t timestamp)
1692{
1693 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1694 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1695 if (req->frame_number != frameNumber)
1696 continue;
1697
1698 for (auto k = req->mPendingBufferList.begin();
1699 k != req->mPendingBufferList.end(); k++ ) {
1700 struct private_handle_t *priv_handle =
1701 (struct private_handle_t *) (*(k->buffer));
1702 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1703 }
1704 }
1705 return;
1706}
1707
Thierry Strudel3d639192016-09-09 11:52:26 -07001708/*===========================================================================
1709 * FUNCTION : configureStreams
1710 *
1711 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1712 * and output streams.
1713 *
1714 * PARAMETERS :
1715 * @stream_list : streams to be configured
1716 *
1717 * RETURN : int type of status, forwarded from configureStreamsPerfLocked()
1718 *
1719 *==========================================================================*/
1720int QCamera3HardwareInterface::configureStreams(
1721 camera3_stream_configuration_t *streamList)
1722{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001723 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001724 int rc = 0;
1725
1726 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001727 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001728 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001729 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001730
1731 return rc;
1732}
1733
1734/*===========================================================================
1735 * FUNCTION : configureStreamsPerfLocked
1736 *
1737 * DESCRIPTION: configureStreams while perfLock is held.
1738 *
1739 * PARAMETERS :
1740 * @stream_list : streams to be configured
1741 *
1742 * RETURN : int32_t type of status
1743 * NO_ERROR -- success
1744 * non-zero failure code
1745 *==========================================================================*/
1746int QCamera3HardwareInterface::configureStreamsPerfLocked(
1747 camera3_stream_configuration_t *streamList)
1748{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001749 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001750 int rc = 0;
1751
1752 // Sanity check stream_list
1753 if (streamList == NULL) {
1754 LOGE("NULL stream configuration");
1755 return BAD_VALUE;
1756 }
1757 if (streamList->streams == NULL) {
1758 LOGE("NULL stream list");
1759 return BAD_VALUE;
1760 }
1761
1762 if (streamList->num_streams < 1) {
1763 LOGE("Bad number of streams requested: %d",
1764 streamList->num_streams);
1765 return BAD_VALUE;
1766 }
1767
1768 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1769 LOGE("Maximum number of streams %d exceeded: %d",
1770 MAX_NUM_STREAMS, streamList->num_streams);
1771 return BAD_VALUE;
1772 }
1773
Jason Leec4cf5032017-05-24 18:31:41 -07001774 mOpMode = streamList->operation_mode;
1775 LOGD("mOpMode: %d", mOpMode);
1776
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001777 rc = validateUsageFlags(streamList);
1778 if (rc != NO_ERROR) {
1779 return rc;
1780 }
1781
Thierry Strudel3d639192016-09-09 11:52:26 -07001782 /* first invalidate all the steams in the mStreamList
1783 * if they appear again, they will be validated */
1784 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1785 it != mStreamInfo.end(); it++) {
1786 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1787 if (channel) {
1788 channel->stop();
1789 }
1790 (*it)->status = INVALID;
1791 }
1792
1793 if (mRawDumpChannel) {
1794 mRawDumpChannel->stop();
1795 delete mRawDumpChannel;
1796 mRawDumpChannel = NULL;
1797 }
1798
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001799 if (mHdrPlusRawSrcChannel) {
1800 mHdrPlusRawSrcChannel->stop();
1801 delete mHdrPlusRawSrcChannel;
1802 mHdrPlusRawSrcChannel = NULL;
1803 }
1804
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 if (mSupportChannel)
1806 mSupportChannel->stop();
1807
1808 if (mAnalysisChannel) {
1809 mAnalysisChannel->stop();
1810 }
1811 if (mMetadataChannel) {
1812 /* If content of mStreamInfo is not 0, there is metadata stream */
1813         /* If mStreamInfo is not empty, a metadata stream exists */
1814 }
1815 if (mChannelHandle) {
1816 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001817 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 LOGD("stopping channel %d", mChannelHandle);
1819 }
1820
1821 pthread_mutex_lock(&mMutex);
1822
1823 // Check state
1824 switch (mState) {
1825 case INITIALIZED:
1826 case CONFIGURED:
1827 case STARTED:
1828 /* valid state */
1829 break;
1830 default:
1831 LOGE("Invalid state %d", mState);
1832 pthread_mutex_unlock(&mMutex);
1833 return -ENODEV;
1834 }
1835
1836 /* Check whether we have video stream */
1837 m_bIs4KVideo = false;
1838 m_bIsVideo = false;
1839 m_bEisSupportedSize = false;
1840 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001841 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001843 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001844 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 uint32_t videoWidth = 0U;
1846 uint32_t videoHeight = 0U;
1847 size_t rawStreamCnt = 0;
1848 size_t stallStreamCnt = 0;
1849 size_t processedStreamCnt = 0;
1850 // Number of streams on ISP encoder path
1851 size_t numStreamsOnEncoder = 0;
1852 size_t numYuv888OnEncoder = 0;
1853 bool bYuv888OverrideJpeg = false;
1854 cam_dimension_t largeYuv888Size = {0, 0};
1855 cam_dimension_t maxViewfinderSize = {0, 0};
1856 bool bJpegExceeds4K = false;
1857 bool bJpegOnEncoder = false;
1858 bool bUseCommonFeatureMask = false;
1859 cam_feature_mask_t commonFeatureMask = 0;
1860 bool bSmallJpegSize = false;
1861 uint32_t width_ratio;
1862 uint32_t height_ratio;
1863 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1864 camera3_stream_t *inputStream = NULL;
1865 bool isJpeg = false;
1866 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001867 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001868 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001869
1870 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1871
1872 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001873 uint8_t eis_prop_set;
1874 uint32_t maxEisWidth = 0;
1875 uint32_t maxEisHeight = 0;
1876
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001877 // Initialize all instant AEC related variables
1878 mInstantAEC = false;
1879 mResetInstantAEC = false;
1880 mInstantAECSettledFrameNumber = 0;
1881 mAecSkipDisplayFrameBound = 0;
1882 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001883 mCurrFeatureState = 0;
1884 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001885
Thierry Strudel3d639192016-09-09 11:52:26 -07001886 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1887
1888 size_t count = IS_TYPE_MAX;
1889 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1890 for (size_t i = 0; i < count; i++) {
1891 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001892 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1893 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 break;
1895 }
1896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001897
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001898 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001899 maxEisWidth = MAX_EIS_WIDTH;
1900 maxEisHeight = MAX_EIS_HEIGHT;
1901 }
1902
1903 /* EIS setprop control */
1904 char eis_prop[PROPERTY_VALUE_MAX];
1905 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001906 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001907 eis_prop_set = (uint8_t)atoi(eis_prop);
1908
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001909 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1911
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001912 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1913 m_bEisEnable, eis_prop_set, m_bEisSupported);
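    // Example (debug aid): EIS can be turned off for bring-up with
    //   adb shell setprop persist.camera.eis.enable 0
    // Note that m_bEisEnable additionally requires hardware support
    // (m_bEisSupported) and a non-HFR operation mode, so the property alone
    // cannot force EIS on.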
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001914
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 /* stream configurations */
1916 for (size_t i = 0; i < streamList->num_streams; i++) {
1917 camera3_stream_t *newStream = streamList->streams[i];
1918 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1919 "height = %d, rotation = %d, usage = 0x%x",
1920 i, newStream->stream_type, newStream->format,
1921 newStream->width, newStream->height, newStream->rotation,
1922 newStream->usage);
1923 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1924 newStream->stream_type == CAMERA3_STREAM_INPUT){
1925 isZsl = true;
1926 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001927 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1928 IS_USAGE_PREVIEW(newStream->usage)) {
1929 isPreview = true;
1930 }
1931
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1933 inputStream = newStream;
1934 }
1935
Emilian Peev7650c122017-01-19 08:24:33 -08001936 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1937 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 isJpeg = true;
1939 jpegSize.width = newStream->width;
1940 jpegSize.height = newStream->height;
1941 if (newStream->width > VIDEO_4K_WIDTH ||
1942 newStream->height > VIDEO_4K_HEIGHT)
1943 bJpegExceeds4K = true;
1944 }
1945
1946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1948 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001949 // In HAL3 we can have multiple different video streams.
1950             // The videoWidth and videoHeight variables are used below as
1951             // the dimensions of the largest of them.
1952 if (videoWidth < newStream->width ||
1953 videoHeight < newStream->height) {
1954 videoWidth = newStream->width;
1955 videoHeight = newStream->height;
1956 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1958 (VIDEO_4K_HEIGHT <= newStream->height)) {
1959 m_bIs4KVideo = true;
1960 }
1961 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1962 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001963
Thierry Strudel3d639192016-09-09 11:52:26 -07001964 }
1965 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1966 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1967 switch (newStream->format) {
1968 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001969 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1970 depthPresent = true;
1971 break;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 stallStreamCnt++;
1974 if (isOnEncoder(maxViewfinderSize, newStream->width,
1975 newStream->height)) {
1976 numStreamsOnEncoder++;
1977 bJpegOnEncoder = true;
1978 }
1979 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1980 newStream->width);
1981 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1982                     newStream->height);
1983 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1984 "FATAL: max_downscale_factor cannot be zero and so assert");
1985 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1986 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1987 LOGH("Setting small jpeg size flag to true");
1988 bSmallJpegSize = true;
1989 }
1990 break;
1991 case HAL_PIXEL_FORMAT_RAW10:
1992 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1993 case HAL_PIXEL_FORMAT_RAW16:
1994 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001995 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1996 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1997 pdStatCount++;
1998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001999 break;
2000 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2001 processedStreamCnt++;
2002 if (isOnEncoder(maxViewfinderSize, newStream->width,
2003 newStream->height)) {
2004 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2005 !IS_USAGE_ZSL(newStream->usage)) {
2006 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2007 }
2008 numStreamsOnEncoder++;
2009 }
2010 break;
2011 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2012 processedStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 // If Yuv888 size is not greater than 4K, set feature mask
2016 // to SUPERSET so that it support concurrent request on
2017 // YUV and JPEG.
2018 if (newStream->width <= VIDEO_4K_WIDTH &&
2019 newStream->height <= VIDEO_4K_HEIGHT) {
2020 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2021 }
2022 numStreamsOnEncoder++;
2023 numYuv888OnEncoder++;
2024 largeYuv888Size.width = newStream->width;
2025 largeYuv888Size.height = newStream->height;
2026 }
2027 break;
2028 default:
2029 processedStreamCnt++;
2030 if (isOnEncoder(maxViewfinderSize, newStream->width,
2031 newStream->height)) {
2032 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2033 numStreamsOnEncoder++;
2034 }
2035 break;
2036 }
2037
2038 }
2039 }
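    // At this point only bookkeeping has happened: the counters gathered above
    // (stallStreamCnt, rawStreamCnt, processedStreamCnt, numStreamsOnEncoder,
    // pdStatCount, ...) drive the sanity checks and feature-mask decisions
    // below; no channels have been created yet.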
2040
2041 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2042 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2043 !m_bIsVideo) {
2044 m_bEisEnable = false;
2045 }
2046
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002047 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2048 pthread_mutex_unlock(&mMutex);
2049 return -EINVAL;
2050 }
2051
Thierry Strudel54dc9782017-02-15 12:12:10 -08002052 uint8_t forceEnableTnr = 0;
2053 char tnr_prop[PROPERTY_VALUE_MAX];
2054 memset(tnr_prop, 0, sizeof(tnr_prop));
2055 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2056 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2057
Thierry Strudel3d639192016-09-09 11:52:26 -07002058 /* Logic to enable/disable TNR based on specific config size/etc.*/
2059 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002060 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2061 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002062 else if (forceEnableTnr)
2063 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002064
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002065 char videoHdrProp[PROPERTY_VALUE_MAX];
2066 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2067 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2068 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2069
2070 if (hdr_mode_prop == 1 && m_bIsVideo &&
2071 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2072 m_bVideoHdrEnabled = true;
2073 else
2074 m_bVideoHdrEnabled = false;
2075
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Check if num_streams is sane */
2078 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2079 rawStreamCnt > MAX_RAW_STREAMS ||
2080 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2081         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2082 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2083 pthread_mutex_unlock(&mMutex);
2084 return -EINVAL;
2085 }
2086 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002087 if (isZsl && m_bIs4KVideo) {
2088 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 pthread_mutex_unlock(&mMutex);
2090 return -EINVAL;
2091 }
2092 /* Check if stream sizes are sane */
2093 if (numStreamsOnEncoder > 2) {
2094 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 } else if (1 < numStreamsOnEncoder){
2098 bUseCommonFeatureMask = true;
2099 LOGH("Multiple streams above max viewfinder size, common mask needed");
2100 }
2101
2102 /* Check if BLOB size is greater than 4k in 4k recording case */
2103 if (m_bIs4KVideo && bJpegExceeds4K) {
2104 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 }
2108
Emilian Peev7650c122017-01-19 08:24:33 -08002109 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2110 depthPresent) {
2111 LOGE("HAL doesn't support depth streams in HFR mode!");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2117 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2118 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2119 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2120 // configurations:
2121 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2122 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2123 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2124 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2125 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2126 __func__);
2127 pthread_mutex_unlock(&mMutex);
2128 return -EINVAL;
2129 }
2130
2131 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2132 // the YUV stream's size is greater or equal to the JPEG size, set common
2133 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2134 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2135 jpegSize.width, jpegSize.height) &&
2136 largeYuv888Size.width > jpegSize.width &&
2137 largeYuv888Size.height > jpegSize.height) {
2138 bYuv888OverrideJpeg = true;
2139 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2140 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2141 }
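    // Illustrative case (hypothetical sizes): a 4000x3000 JPEG stream combined
    // with a 4096x3072 YCbCr_420_888 stream, both above the max viewfinder
    // size, sets bYuv888OverrideJpeg; the snapshot stream is then sized to the
    // larger YUV dimensions further below so post-processing can be bypassed.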
2142
2143 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2144 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2145 commonFeatureMask);
2146 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2147 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2148
2149 rc = validateStreamDimensions(streamList);
2150 if (rc == NO_ERROR) {
2151 rc = validateStreamRotations(streamList);
2152 }
2153 if (rc != NO_ERROR) {
2154 LOGE("Invalid stream configuration requested!");
2155 pthread_mutex_unlock(&mMutex);
2156 return rc;
2157 }
2158
Emilian Peev0f3c3162017-03-15 12:57:46 +00002159 if (1 < pdStatCount) {
2160 LOGE("HAL doesn't support multiple PD streams");
2161 pthread_mutex_unlock(&mMutex);
2162 return -EINVAL;
2163 }
2164
2165 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2166 (1 == pdStatCount)) {
2167 LOGE("HAL doesn't support PD streams in HFR mode!");
2168 pthread_mutex_unlock(&mMutex);
2169 return -EINVAL;
2170 }
2171
Thierry Strudel3d639192016-09-09 11:52:26 -07002172 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2173 for (size_t i = 0; i < streamList->num_streams; i++) {
2174 camera3_stream_t *newStream = streamList->streams[i];
2175 LOGH("newStream type = %d, stream format = %d "
2176 "stream size : %d x %d, stream rotation = %d",
2177 newStream->stream_type, newStream->format,
2178 newStream->width, newStream->height, newStream->rotation);
2179 //if the stream is in the mStreamList validate it
2180 bool stream_exists = false;
2181 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2182 it != mStreamInfo.end(); it++) {
2183 if ((*it)->stream == newStream) {
2184 QCamera3ProcessingChannel *channel =
2185 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2186 stream_exists = true;
2187 if (channel)
2188 delete channel;
2189 (*it)->status = VALID;
2190 (*it)->stream->priv = NULL;
2191 (*it)->channel = NULL;
2192 }
2193 }
2194 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2195 //new stream
2196 stream_info_t* stream_info;
2197 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2198 if (!stream_info) {
2199 LOGE("Could not allocate stream info");
2200 rc = -ENOMEM;
2201 pthread_mutex_unlock(&mMutex);
2202 return rc;
2203 }
2204 stream_info->stream = newStream;
2205 stream_info->status = VALID;
2206 stream_info->channel = NULL;
2207 mStreamInfo.push_back(stream_info);
2208 }
2209 /* Covers Opaque ZSL and API1 F/W ZSL */
2210 if (IS_USAGE_ZSL(newStream->usage)
2211 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2212 if (zslStream != NULL) {
2213 LOGE("Multiple input/reprocess streams requested!");
2214 pthread_mutex_unlock(&mMutex);
2215 return BAD_VALUE;
2216 }
2217 zslStream = newStream;
2218 }
2219 /* Covers YUV reprocess */
2220 if (inputStream != NULL) {
2221 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2222 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2223 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->width == newStream->width
2225 && inputStream->height == newStream->height) {
2226 if (zslStream != NULL) {
2227                 /* This scenario indicates that multiple YUV streams with the same
2228                  * size as the input stream have been requested. Since the zsl stream
2229                  * handle is solely used to override the size of streams that share
2230                  * h/w streams, we just make a guess here as to which of the streams
2231                  * is the ZSL stream. This will be refactored once we have generic
2232                  * logic for streams sharing encoder output.
2233                  */
2234                 LOGH("Warning, Multiple input/reprocess streams requested!");
2235 }
2236 zslStream = newStream;
2237 }
2238 }
2239 }
2240
2241 /* If a zsl stream is set, we know that we have configured at least one input or
2242 bidirectional stream */
2243 if (NULL != zslStream) {
2244 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2245 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2246 mInputStreamInfo.format = zslStream->format;
2247 mInputStreamInfo.usage = zslStream->usage;
2248 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2249 mInputStreamInfo.dim.width,
2250 mInputStreamInfo.dim.height,
2251 mInputStreamInfo.format, mInputStreamInfo.usage);
2252 }
2253
2254 cleanAndSortStreamInfo();
2255 if (mMetadataChannel) {
2256 delete mMetadataChannel;
2257 mMetadataChannel = NULL;
2258 }
2259 if (mSupportChannel) {
2260 delete mSupportChannel;
2261 mSupportChannel = NULL;
2262 }
2263
2264 if (mAnalysisChannel) {
2265 delete mAnalysisChannel;
2266 mAnalysisChannel = NULL;
2267 }
2268
2269 if (mDummyBatchChannel) {
2270 delete mDummyBatchChannel;
2271 mDummyBatchChannel = NULL;
2272 }
2273
Emilian Peev7650c122017-01-19 08:24:33 -08002274 if (mDepthChannel) {
2275 mDepthChannel = NULL;
2276 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002277 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002278
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002279 mShutterDispatcher.clear();
2280 mOutputBufferDispatcher.clear();
2281
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 char is_type_value[PROPERTY_VALUE_MAX];
2283 property_get("persist.camera.is_type", is_type_value, "4");
2284 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2285
Binhao Line406f062017-05-03 14:39:44 -07002286 char property_value[PROPERTY_VALUE_MAX];
2287 property_get("persist.camera.gzoom.at", property_value, "0");
2288 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002289 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2290 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2291 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2292 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002293
2294 property_get("persist.camera.gzoom.4k", property_value, "0");
2295 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
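    // persist.camera.gzoom.at is read as a bitmask: bit 0 requests Google zoom
    // on the video stream and bit 1 on the preview stream, and either bit only
    // takes effect on the back camera. For example (hypothetical):
    //   adb shell setprop persist.camera.gzoom.at 3
    // requests it for both streams; persist.camera.gzoom.4k additionally gates
    // the video case at 4K resolutions.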
2296
Thierry Strudel3d639192016-09-09 11:52:26 -07002297 //Create metadata channel and initialize it
2298 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2299 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2300 gCamCapability[mCameraId]->color_arrangement);
2301 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2302 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002303 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 if (mMetadataChannel == NULL) {
2305 LOGE("failed to allocate metadata channel");
2306 rc = -ENOMEM;
2307 pthread_mutex_unlock(&mMutex);
2308 return rc;
2309 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002310 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2312 if (rc < 0) {
2313 LOGE("metadata channel initialization failed");
2314 delete mMetadataChannel;
2315 mMetadataChannel = NULL;
2316 pthread_mutex_unlock(&mMutex);
2317 return rc;
2318 }
2319
Thierry Strudel2896d122017-02-23 19:18:03 -08002320 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002323 // Keep track of preview/video streams indices.
2324 // There could be more than one preview streams, but only one video stream.
2325 int32_t video_stream_idx = -1;
2326 int32_t preview_stream_idx[streamList->num_streams];
2327 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002328 bool previewTnr[streamList->num_streams];
2329 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2330 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2331 // Loop through once to determine preview TNR conditions before creating channels.
2332 for (size_t i = 0; i < streamList->num_streams; i++) {
2333 camera3_stream_t *newStream = streamList->streams[i];
2334 uint32_t stream_usage = newStream->usage;
2335 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2336 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2337 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2338 video_stream_idx = (int32_t)i;
2339 else
2340 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2341 }
2342 }
2343     // By default, preview stream TNR is disabled.
2344     // Enable TNR for the preview stream only if all conditions below are satisfied:
2345     // 1. preview resolution == video resolution.
2346     // 2. video stream TNR is enabled.
2347     // 3. EIS 2.0 is selected, or this is the front camera (which wouldn't use EIS3 even if set).
2348 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2349 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2350 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2351 if (m_bTnrEnabled && m_bTnrVideo &&
2352 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2353 video_stream->width == preview_stream->width &&
2354 video_stream->height == preview_stream->height) {
2355 previewTnr[preview_stream_idx[i]] = true;
2356 }
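    // Illustrative case: a 1920x1080 preview paired with a 1920x1080 video
    // stream, with video TNR enabled and EIS 2.0 selected (or the front
    // camera in use), marks that preview stream for TNR as well
    // (previewTnr[i] = true).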
2357 }
2358
Thierry Strudel3d639192016-09-09 11:52:26 -07002359 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2360 /* Allocate channel objects for the requested streams */
2361 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002362
Thierry Strudel3d639192016-09-09 11:52:26 -07002363 camera3_stream_t *newStream = streamList->streams[i];
2364 uint32_t stream_usage = newStream->usage;
2365 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2367 struct camera_info *p_info = NULL;
2368 pthread_mutex_lock(&gCamLock);
2369 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2370 pthread_mutex_unlock(&gCamLock);
2371 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2372 || IS_USAGE_ZSL(newStream->usage)) &&
2373 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002374 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002376 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2377 if (bUseCommonFeatureMask)
2378 zsl_ppmask = commonFeatureMask;
2379 else
2380 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002382 if (numStreamsOnEncoder > 0)
2383 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2384 else
2385 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002386 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002388 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002389 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002390 LOGH("Input stream configured, reprocess config");
2391 } else {
2392 //for non zsl streams find out the format
2393 switch (newStream->format) {
2394 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2395 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002396 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2398 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2399 /* add additional features to pp feature mask */
2400 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2401 mStreamConfigInfo.num_streams);
2402
2403 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2404 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2405 CAM_STREAM_TYPE_VIDEO;
2406 if (m_bTnrEnabled && m_bTnrVideo) {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2408 CAM_QCOM_FEATURE_CPP_TNR;
2409 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2410 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2411 ~CAM_QCOM_FEATURE_CDS;
2412 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002413 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2414 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2415 CAM_QTI_FEATURE_PPEISCORE;
2416 }
Binhao Line406f062017-05-03 14:39:44 -07002417 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2418 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2419 CAM_QCOM_FEATURE_GOOG_ZOOM;
2420 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
2422 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2423 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002424 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002425 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2426 CAM_QCOM_FEATURE_CPP_TNR;
2427 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2429 ~CAM_QCOM_FEATURE_CDS;
2430 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002431 if(!m_bSwTnrPreview) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2433 ~CAM_QTI_FEATURE_SW_TNR;
2434 }
Binhao Line406f062017-05-03 14:39:44 -07002435 if (is_goog_zoom_preview_enabled) {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2437 CAM_QCOM_FEATURE_GOOG_ZOOM;
2438 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 padding_info.width_padding = mSurfaceStridePadding;
2440 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002441 previewSize.width = (int32_t)newStream->width;
2442 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 }
2444 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2445 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2446 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2447 newStream->height;
2448 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2449 newStream->width;
2450 }
2451 }
2452 break;
2453 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002454 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2456 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2457 if (bUseCommonFeatureMask)
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2459 commonFeatureMask;
2460 else
2461 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2462 CAM_QCOM_FEATURE_NONE;
2463 } else {
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2465 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2466 }
2467 break;
2468 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002469 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002470 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2471 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2472 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2473 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2474 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002475 /* Remove rotation if it is not supported
2476 for 4K LiveVideo snapshot case (online processing) */
2477 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2478 CAM_QCOM_FEATURE_ROTATION)) {
2479 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2480 &= ~CAM_QCOM_FEATURE_ROTATION;
2481 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002482 } else {
2483 if (bUseCommonFeatureMask &&
2484 isOnEncoder(maxViewfinderSize, newStream->width,
2485 newStream->height)) {
2486 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2487 } else {
2488 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2489 }
2490 }
2491 if (isZsl) {
2492 if (zslStream) {
2493 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2494 (int32_t)zslStream->width;
2495 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2496 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002497 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2498 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002499 } else {
2500 LOGE("Error, No ZSL stream identified");
2501 pthread_mutex_unlock(&mMutex);
2502 return -EINVAL;
2503 }
2504 } else if (m_bIs4KVideo) {
2505 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2506 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2507 } else if (bYuv888OverrideJpeg) {
2508 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2509 (int32_t)largeYuv888Size.width;
2510 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2511 (int32_t)largeYuv888Size.height;
2512 }
2513 break;
2514 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2515 case HAL_PIXEL_FORMAT_RAW16:
2516 case HAL_PIXEL_FORMAT_RAW10:
2517 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2518 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2519 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002520 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2521 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2522 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2523 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2524 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2525 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2526 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2527 gCamCapability[mCameraId]->dt[mPDIndex];
2528 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2529 gCamCapability[mCameraId]->vc[mPDIndex];
2530 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 break;
2532 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002533 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002534 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2535 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2536 break;
2537 }
2538 }
2539
2540 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2541 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2542 gCamCapability[mCameraId]->color_arrangement);
2543
2544 if (newStream->priv == NULL) {
2545 //New stream, construct channel
2546 switch (newStream->stream_type) {
2547 case CAMERA3_STREAM_INPUT:
2548 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2549 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2550 break;
2551 case CAMERA3_STREAM_BIDIRECTIONAL:
2552 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2553 GRALLOC_USAGE_HW_CAMERA_WRITE;
2554 break;
2555 case CAMERA3_STREAM_OUTPUT:
2556                 /* For video encoding streams, set the read/write rarely
2557                  * flags so that the buffers may be allocated un-cached */
2558 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2559 newStream->usage |=
2560 (GRALLOC_USAGE_SW_READ_RARELY |
2561 GRALLOC_USAGE_SW_WRITE_RARELY |
2562 GRALLOC_USAGE_HW_CAMERA_WRITE);
2563 else if (IS_USAGE_ZSL(newStream->usage))
2564 {
2565 LOGD("ZSL usage flag skipping");
2566 }
2567 else if (newStream == zslStream
2568 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2569 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2570 } else
2571 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2572 break;
2573 default:
2574 LOGE("Invalid stream_type %d", newStream->stream_type);
2575 break;
2576 }
2577
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002578 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002579 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2580 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2581 QCamera3ProcessingChannel *channel = NULL;
2582 switch (newStream->format) {
2583 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2584 if ((newStream->usage &
2585 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2586 (streamList->operation_mode ==
2587 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2588 ) {
2589 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2590 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002591 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002592 this,
2593 newStream,
2594 (cam_stream_type_t)
2595 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2596 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2597 mMetadataChannel,
2598 0); //heap buffers are not required for HFR video channel
2599 if (channel == NULL) {
2600 LOGE("allocation of channel failed");
2601 pthread_mutex_unlock(&mMutex);
2602 return -ENOMEM;
2603 }
2604 //channel->getNumBuffers() will return 0 here so use
2605                     //MAX_INFLIGHT_HFR_REQUESTS
2606 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2607 newStream->priv = channel;
2608 LOGI("num video buffers in HFR mode: %d",
2609 MAX_INFLIGHT_HFR_REQUESTS);
2610 } else {
2611 /* Copy stream contents in HFR preview only case to create
2612 * dummy batch channel so that sensor streaming is in
2613 * HFR mode */
2614 if (!m_bIsVideo && (streamList->operation_mode ==
2615 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2616 mDummyBatchStream = *newStream;
2617 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002618 int bufferCount = MAX_INFLIGHT_REQUESTS;
2619 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2620 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002621 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2622 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2623 bufferCount = m_bIs4KVideo ?
2624 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2625 }
2626
Thierry Strudel2896d122017-02-23 19:18:03 -08002627 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002628 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2629 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002630 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 this,
2632 newStream,
2633 (cam_stream_type_t)
2634 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2635 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2636 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002637 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 if (channel == NULL) {
2639 LOGE("allocation of channel failed");
2640 pthread_mutex_unlock(&mMutex);
2641 return -ENOMEM;
2642 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 /* disable UBWC for preview, though supported,
2644 * to take advantage of CPP duplication */
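                    /* Note (interpretation of the condition below): when the preview and
                     * video sizes match and video UBWC is disabled, the CPP can presumably
                     * produce a single linear buffer and duplicate it for both streams,
                     * which is why preview UBWC is forced off in that case. */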
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002645 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 (previewSize.width == (int32_t)videoWidth)&&
2647 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002649 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002650 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002651 /* When goog_zoom is linked to the preview or video stream,
2652 * disable UBWC for the linked stream */
2653 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2654 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2655 channel->setUBWCEnabled(false);
2656 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 newStream->max_buffers = channel->getNumBuffers();
2658 newStream->priv = channel;
2659 }
2660 break;
2661 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2662 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2663 mChannelHandle,
2664 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002665 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002666 this,
2667 newStream,
2668 (cam_stream_type_t)
2669 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2670 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2671 mMetadataChannel);
2672 if (channel == NULL) {
2673 LOGE("allocation of YUV channel failed");
2674 pthread_mutex_unlock(&mMutex);
2675 return -ENOMEM;
2676 }
2677 newStream->max_buffers = channel->getNumBuffers();
2678 newStream->priv = channel;
2679 break;
2680 }
2681 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2682 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002683 case HAL_PIXEL_FORMAT_RAW10: {
2684 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2685 (HAL_DATASPACE_DEPTH != newStream->data_space))
2686 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002687 mRawChannel = new QCamera3RawChannel(
2688 mCameraHandle->camera_handle, mChannelHandle,
2689 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002690 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002691 this, newStream,
2692 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002693 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 if (mRawChannel == NULL) {
2695 LOGE("allocation of raw channel failed");
2696 pthread_mutex_unlock(&mMutex);
2697 return -ENOMEM;
2698 }
2699 newStream->max_buffers = mRawChannel->getNumBuffers();
2700 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2701 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002704 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2705 mDepthChannel = new QCamera3DepthChannel(
2706 mCameraHandle->camera_handle, mChannelHandle,
2707 mCameraHandle->ops, NULL, NULL, &padding_info,
2708 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2709 mMetadataChannel);
2710 if (NULL == mDepthChannel) {
2711 LOGE("Allocation of depth channel failed");
2712 pthread_mutex_unlock(&mMutex);
2713 return NO_MEMORY;
2714 }
2715 newStream->priv = mDepthChannel;
2716 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2717 } else {
2718 // Max live snapshot inflight buffer count is 1. This is to mitigate
2719 // frame drop issues for video snapshot: the more buffers that are
2720 // allocated, the more frame drops there are.
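                    // Illustration of the count passed below: with an active video
                    // stream (m_bIsVideo) at most one blob (JPEG) buffer is kept in
                    // flight; otherwise up to MAX_INFLIGHT_BLOB snapshot buffers are
                    // allowed.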
2721 mPictureChannel = new QCamera3PicChannel(
2722 mCameraHandle->camera_handle, mChannelHandle,
2723 mCameraHandle->ops, captureResultCb,
2724 setBufferErrorStatus, &padding_info, this, newStream,
2725 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2726 m_bIs4KVideo, isZsl, mMetadataChannel,
2727 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2728 if (mPictureChannel == NULL) {
2729 LOGE("allocation of channel failed");
2730 pthread_mutex_unlock(&mMutex);
2731 return -ENOMEM;
2732 }
2733 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2734 newStream->max_buffers = mPictureChannel->getNumBuffers();
2735 mPictureChannel->overrideYuvSize(
2736 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2737 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002739 break;
2740
2741 default:
2742 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002743 pthread_mutex_unlock(&mMutex);
2744 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002745 }
2746 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2747 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2748 } else {
2749 LOGE("Error, Unknown stream type");
2750 pthread_mutex_unlock(&mMutex);
2751 return -EINVAL;
2752 }
2753
2754 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002755 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002756 // Here we only care whether it's EIS3 or not
2757 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2758 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2759 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2760 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002761 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002762 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002763 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2765 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2766 }
2767 }
2768
2769 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2770 it != mStreamInfo.end(); it++) {
2771 if ((*it)->stream == newStream) {
2772 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2773 break;
2774 }
2775 }
2776 } else {
2777 // Channel already exists for this stream
2778 // Do nothing for now
2779 }
2780 padding_info = gCamCapability[mCameraId]->padding_info;
2781
Emilian Peev7650c122017-01-19 08:24:33 -08002782 /* Do not add entries for input and depth streams in the metastream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002783 * since there is no real stream associated with them
2784 */
Emilian Peev7650c122017-01-19 08:24:33 -08002785 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002786 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2787 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002788 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002790 }
2791
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002792 // Let buffer dispatcher know the configured streams.
2793 mOutputBufferDispatcher.configureStreams(streamList);
2794
Thierry Strudel2896d122017-02-23 19:18:03 -08002795 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2796 onlyRaw = false;
2797 }
2798
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002799 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002800 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 cam_analysis_info_t analysisInfo;
2803 int32_t ret = NO_ERROR;
2804 ret = mCommon.getAnalysisInfo(
2805 FALSE,
2806 analysisFeatureMask,
2807 &analysisInfo);
2808 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002809 cam_color_filter_arrangement_t analysis_color_arrangement =
2810 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2811 CAM_FILTER_ARRANGEMENT_Y :
2812 gCamCapability[mCameraId]->color_arrangement);
2813 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2814 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002815 cam_dimension_t analysisDim;
2816 analysisDim = mCommon.getMatchingDimension(previewSize,
2817 analysisInfo.analysis_recommended_res);
2818
2819 mAnalysisChannel = new QCamera3SupportChannel(
2820 mCameraHandle->camera_handle,
2821 mChannelHandle,
2822 mCameraHandle->ops,
2823 &analysisInfo.analysis_padding_info,
2824 analysisFeatureMask,
2825 CAM_STREAM_TYPE_ANALYSIS,
2826 &analysisDim,
2827 (analysisInfo.analysis_format
2828 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2829 : CAM_FORMAT_YUV_420_NV21),
2830 analysisInfo.hw_analysis_supported,
2831 gCamCapability[mCameraId]->color_arrangement,
2832 this,
2833 0); // force buffer count to 0
2834 } else {
2835 LOGW("getAnalysisInfo failed, ret = %d", ret);
2836 }
2837 if (!mAnalysisChannel) {
2838 LOGW("Analysis channel cannot be created");
2839 }
2840 }
2841
Thierry Strudel3d639192016-09-09 11:52:26 -07002842 //RAW DUMP channel
2843 if (mEnableRawDump && isRawStreamRequested == false){
2844 cam_dimension_t rawDumpSize;
2845 rawDumpSize = getMaxRawSize(mCameraId);
2846 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2847 setPAAFSupport(rawDumpFeatureMask,
2848 CAM_STREAM_TYPE_RAW,
2849 gCamCapability[mCameraId]->color_arrangement);
2850 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2851 mChannelHandle,
2852 mCameraHandle->ops,
2853 rawDumpSize,
2854 &padding_info,
2855 this, rawDumpFeatureMask);
2856 if (!mRawDumpChannel) {
2857 LOGE("Raw Dump channel cannot be created");
2858 pthread_mutex_unlock(&mMutex);
2859 return -ENOMEM;
2860 }
2861 }
2862
Thierry Strudel3d639192016-09-09 11:52:26 -07002863 if (mAnalysisChannel) {
2864 cam_analysis_info_t analysisInfo;
2865 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2867 CAM_STREAM_TYPE_ANALYSIS;
2868 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2869 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002870 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002871 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2872 &analysisInfo);
2873 if (rc != NO_ERROR) {
2874 LOGE("getAnalysisInfo failed, ret = %d", rc);
2875 pthread_mutex_unlock(&mMutex);
2876 return rc;
2877 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002878 cam_color_filter_arrangement_t analysis_color_arrangement =
2879 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2880 CAM_FILTER_ARRANGEMENT_Y :
2881 gCamCapability[mCameraId]->color_arrangement);
2882 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2883 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2884 analysis_color_arrangement);
2885
Thierry Strudel3d639192016-09-09 11:52:26 -07002886 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002887 mCommon.getMatchingDimension(previewSize,
2888 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002889 mStreamConfigInfo.num_streams++;
2890 }
2891
Thierry Strudel2896d122017-02-23 19:18:03 -08002892 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002893 cam_analysis_info_t supportInfo;
2894 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2895 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2896 setPAAFSupport(callbackFeatureMask,
2897 CAM_STREAM_TYPE_CALLBACK,
2898 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002899 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002900 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002901 if (ret != NO_ERROR) {
2902 /* Ignore the error for Mono camera
2903 * because the PAAF bit mask is only set
2904 * for CAM_STREAM_TYPE_ANALYSIS stream type
2905 */
2906 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2907 LOGW("getAnalysisInfo failed, ret = %d", ret);
2908 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002909 }
2910 mSupportChannel = new QCamera3SupportChannel(
2911 mCameraHandle->camera_handle,
2912 mChannelHandle,
2913 mCameraHandle->ops,
2914 &gCamCapability[mCameraId]->padding_info,
2915 callbackFeatureMask,
2916 CAM_STREAM_TYPE_CALLBACK,
2917 &QCamera3SupportChannel::kDim,
2918 CAM_FORMAT_YUV_420_NV21,
2919 supportInfo.hw_analysis_supported,
2920 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002921 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002922 if (!mSupportChannel) {
2923 LOGE("dummy channel cannot be created");
2924 pthread_mutex_unlock(&mMutex);
2925 return -ENOMEM;
2926 }
2927 }
2928
2929 if (mSupportChannel) {
2930 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2931 QCamera3SupportChannel::kDim;
2932 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2933 CAM_STREAM_TYPE_CALLBACK;
2934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2935 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2937 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2938 gCamCapability[mCameraId]->color_arrangement);
2939 mStreamConfigInfo.num_streams++;
2940 }
2941
2942 if (mRawDumpChannel) {
2943 cam_dimension_t rawSize;
2944 rawSize = getMaxRawSize(mCameraId);
2945 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2946 rawSize;
2947 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2948 CAM_STREAM_TYPE_RAW;
2949 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2950 CAM_QCOM_FEATURE_NONE;
2951 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2952 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2953 gCamCapability[mCameraId]->color_arrangement);
2954 mStreamConfigInfo.num_streams++;
2955 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002956
2957 if (mHdrPlusRawSrcChannel) {
2958 cam_dimension_t rawSize;
2959 rawSize = getMaxRawSize(mCameraId);
2960 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2961 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2963 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2965 gCamCapability[mCameraId]->color_arrangement);
2966 mStreamConfigInfo.num_streams++;
2967 }
2968
Thierry Strudel3d639192016-09-09 11:52:26 -07002969 /* In HFR mode, if no video stream is added, create a dummy channel so that
2970 * the ISP can run in batch mode even in the preview-only case. This channel is
2971 * never 'start'ed (no stream-on); it is only 'initialized' */
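    /* Illustrative example (hypothetical 120fps constrained high-speed session):
     * the sensor streams at 120fps while the framework consumes preview at a
     * lower rate, so sensor frames are grouped into batches; the dummy
     * video-type stream configured below lets the ISP select that batched
     * sensor mode even though only a preview stream was requested. */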
2972 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2973 !m_bIsVideo) {
2974 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2975 setPAAFSupport(dummyFeatureMask,
2976 CAM_STREAM_TYPE_VIDEO,
2977 gCamCapability[mCameraId]->color_arrangement);
2978 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2979 mChannelHandle,
2980 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002981 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002982 this,
2983 &mDummyBatchStream,
2984 CAM_STREAM_TYPE_VIDEO,
2985 dummyFeatureMask,
2986 mMetadataChannel);
2987 if (NULL == mDummyBatchChannel) {
2988 LOGE("creation of mDummyBatchChannel failed."
2989 "Preview will use non-hfr sensor mode ");
2990 }
2991 }
2992 if (mDummyBatchChannel) {
2993 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2994 mDummyBatchStream.width;
2995 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2996 mDummyBatchStream.height;
2997 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2998 CAM_STREAM_TYPE_VIDEO;
2999 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3000 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3001 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3002 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3003 gCamCapability[mCameraId]->color_arrangement);
3004 mStreamConfigInfo.num_streams++;
3005 }
3006
3007 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3008 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003009 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003010 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
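    // Buffer budget chosen above: 0 when recording 4K video, MAX_VIDEO_BUFFERS
    // when the EIS 3.0 property is enabled together with a video stream, and
    // MAX_INFLIGHT_REQUESTS otherwise.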
Thierry Strudel3d639192016-09-09 11:52:26 -07003011
3012 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3013 for (pendingRequestIterator i = mPendingRequestsList.begin();
3014 i != mPendingRequestsList.end();) {
3015 i = erasePendingRequest(i);
3016 }
3017 mPendingFrameDropList.clear();
3018 // Initialize/Reset the pending buffers list
3019 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3020 req.mPendingBufferList.clear();
3021 }
3022 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3023
Thierry Strudel3d639192016-09-09 11:52:26 -07003024 mCurJpegMeta.clear();
3025 //Get min frame duration for this stream configuration
3026 deriveMinFrameDuration();
3027
Chien-Yu Chenee335912017-02-09 17:53:20 -08003028 mFirstPreviewIntentSeen = false;
3029
3030 // Disable HDR+ if it is enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003031 {
3032 Mutex::Autolock l(gHdrPlusClientLock);
3033 disableHdrPlusModeLocked();
3034 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003035
Thierry Strudel3d639192016-09-09 11:52:26 -07003036 // Update state
3037 mState = CONFIGURED;
3038
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003039 mFirstMetadataCallback = true;
3040
Thierry Strudel3d639192016-09-09 11:52:26 -07003041 pthread_mutex_unlock(&mMutex);
3042
3043 return rc;
3044}
3045
3046/*===========================================================================
3047 * FUNCTION : validateCaptureRequest
3048 *
3049 * DESCRIPTION: validate a capture request from camera service
3050 *
3051 * PARAMETERS :
3052 * @request : request from framework to process
3053 *
3054 * RETURN :
3055 *
3056 *==========================================================================*/
3057int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003058 camera3_capture_request_t *request,
3059 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003060{
3061 ssize_t idx = 0;
3062 const camera3_stream_buffer_t *b;
3063 CameraMetadata meta;
3064
3065 /* Sanity check the request */
3066 if (request == NULL) {
3067 LOGE("NULL capture request");
3068 return BAD_VALUE;
3069 }
3070
3071 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3072 /*settings cannot be null for the first request*/
3073 return BAD_VALUE;
3074 }
3075
3076 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003077 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3078 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003079 LOGE("%s: Request %d: No output buffers provided!",
3080 __FUNCTION__, frameNumber);
3081 return BAD_VALUE;
3082 }
3083 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3084 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3085 request->num_output_buffers, MAX_NUM_STREAMS);
3086 return BAD_VALUE;
3087 }
3088 if (request->input_buffer != NULL) {
3089 b = request->input_buffer;
3090 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3091 LOGE("Request %d: Buffer %ld: Status not OK!",
3092 frameNumber, (long)idx);
3093 return BAD_VALUE;
3094 }
3095 if (b->release_fence != -1) {
3096 LOGE("Request %d: Buffer %ld: Has a release fence!",
3097 frameNumber, (long)idx);
3098 return BAD_VALUE;
3099 }
3100 if (b->buffer == NULL) {
3101 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3102 frameNumber, (long)idx);
3103 return BAD_VALUE;
3104 }
3105 }
3106
3107 // Validate all buffers
3108 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003109 if (b == NULL) {
3110 return BAD_VALUE;
3111 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003112 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003113 QCamera3ProcessingChannel *channel =
3114 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3115 if (channel == NULL) {
3116 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3117 frameNumber, (long)idx);
3118 return BAD_VALUE;
3119 }
3120 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3121 LOGE("Request %d: Buffer %ld: Status not OK!",
3122 frameNumber, (long)idx);
3123 return BAD_VALUE;
3124 }
3125 if (b->release_fence != -1) {
3126 LOGE("Request %d: Buffer %ld: Has a release fence!",
3127 frameNumber, (long)idx);
3128 return BAD_VALUE;
3129 }
3130 if (b->buffer == NULL) {
3131 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3132 frameNumber, (long)idx);
3133 return BAD_VALUE;
3134 }
3135 if (*(b->buffer) == NULL) {
3136 LOGE("Request %d: Buffer %ld: NULL private handle!",
3137 frameNumber, (long)idx);
3138 return BAD_VALUE;
3139 }
3140 idx++;
3141 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003142 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003143 return NO_ERROR;
3144}
3145
3146/*===========================================================================
3147 * FUNCTION : deriveMinFrameDuration
3148 *
3149 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3150 * on currently configured streams.
3151 *
3152 * PARAMETERS : NONE
3153 *
3154 * RETURN : NONE
3155 *
3156 *==========================================================================*/
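// Illustrative walk-through (hypothetical sizes, not from a specific sensor):
// with a 4032x3024 JPEG stream, a 1920x1080 preview stream and a 1600x1200
// RAW16 stream configured, maxProcessedDim becomes 4032*3024 (JPEG dominates),
// which exceeds the configured RAW area, so the smallest advertised RAW size
// with at least that many pixels is substituted before the per-size minimum
// durations are looked up from gCamCapability.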
3157void QCamera3HardwareInterface::deriveMinFrameDuration()
3158{
3159 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003160 bool hasRaw = false;
3161
3162 mMinRawFrameDuration = 0;
3163 mMinJpegFrameDuration = 0;
3164 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003165
3166 maxJpegDim = 0;
3167 maxProcessedDim = 0;
3168 maxRawDim = 0;
3169
3170 // Figure out maximum jpeg, processed, and raw dimensions
3171 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3172 it != mStreamInfo.end(); it++) {
3173
3174 // Input stream doesn't have valid stream_type
3175 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3176 continue;
3177
3178 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3179 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3180 if (dimension > maxJpegDim)
3181 maxJpegDim = dimension;
3182 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3183 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3184 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003185 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003186 if (dimension > maxRawDim)
3187 maxRawDim = dimension;
3188 } else {
3189 if (dimension > maxProcessedDim)
3190 maxProcessedDim = dimension;
3191 }
3192 }
3193
3194 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3195 MAX_SIZES_CNT);
3196
3197 //Assume all jpeg dimensions are in processed dimensions.
3198 if (maxJpegDim > maxProcessedDim)
3199 maxProcessedDim = maxJpegDim;
3200 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003201 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003202 maxRawDim = INT32_MAX;
3203
3204 for (size_t i = 0; i < count; i++) {
3205 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3206 gCamCapability[mCameraId]->raw_dim[i].height;
3207 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3208 maxRawDim = dimension;
3209 }
3210 }
3211
3212 //Find minimum durations for processed, jpeg, and raw
3213 for (size_t i = 0; i < count; i++) {
3214 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3215 gCamCapability[mCameraId]->raw_dim[i].height) {
3216 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3217 break;
3218 }
3219 }
3220 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3221 for (size_t i = 0; i < count; i++) {
3222 if (maxProcessedDim ==
3223 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3224 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3225 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3226 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3227 break;
3228 }
3229 }
3230}
3231
3232/*===========================================================================
3233 * FUNCTION : getMinFrameDuration
3234 *
3235 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3236 * frame durations and the current request configuration.
3237 *
3238 * PARAMETERS : @request: request sent by the frameworks
3239 *
3240 * RETURN : min frame duration for a particular request
3241 *
3242 *==========================================================================*/
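// Example (hypothetical values): if mMinProcessedFrameDuration is 33.3ms,
// mMinRawFrameDuration is 50ms and the request contains both a RAW and a JPEG
// buffer, the function returns max(50ms, 33.3ms, mMinJpegFrameDuration).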
3243int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3244{
3245 bool hasJpegStream = false;
3246 bool hasRawStream = false;
3247 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3248 const camera3_stream_t *stream = request->output_buffers[i].stream;
3249 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3250 hasJpegStream = true;
3251 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3252 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3253 stream->format == HAL_PIXEL_FORMAT_RAW16)
3254 hasRawStream = true;
3255 }
3256
3257 if (!hasJpegStream)
3258 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3259 else
3260 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3261}
3262
3263/*===========================================================================
3264 * FUNCTION : handleBuffersDuringFlushLock
3265 *
3266 * DESCRIPTION: Account for buffers returned from back-end during flush
3267 * This function is executed while mMutex is held by the caller.
3268 *
3269 * PARAMETERS :
3270 * @buffer: image buffer for the callback
3271 *
3272 * RETURN :
3273 *==========================================================================*/
3274void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3275{
3276 bool buffer_found = false;
3277 for (List<PendingBuffersInRequest>::iterator req =
3278 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3279 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3280 for (List<PendingBufferInfo>::iterator i =
3281 req->mPendingBufferList.begin();
3282 i != req->mPendingBufferList.end(); i++) {
3283 if (i->buffer == buffer->buffer) {
3284 mPendingBuffersMap.numPendingBufsAtFlush--;
3285 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3286 buffer->buffer, req->frame_number,
3287 mPendingBuffersMap.numPendingBufsAtFlush);
3288 buffer_found = true;
3289 break;
3290 }
3291 }
3292 if (buffer_found) {
3293 break;
3294 }
3295 }
3296 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3297 //signal the flush()
3298 LOGD("All buffers returned to HAL. Continue flush");
3299 pthread_cond_signal(&mBuffersCond);
3300 }
3301}
3302
Thierry Strudel3d639192016-09-09 11:52:26 -07003303/*===========================================================================
3304 * FUNCTION : handleBatchMetadata
3305 *
3306 * DESCRIPTION: Handles metadata buffer callback in batch mode
3307 *
3308 * PARAMETERS : @metadata_buf: metadata buffer
3309 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3310 * the meta buf in this method
3311 *
3312 * RETURN :
3313 *
3314 *==========================================================================*/
3315void QCamera3HardwareInterface::handleBatchMetadata(
3316 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3317{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003318 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003319
3320 if (NULL == metadata_buf) {
3321 LOGE("metadata_buf is NULL");
3322 return;
3323 }
3324 /* In batch mode, the metadata will contain the frame number and timestamp of
3325 * the last frame in the batch. Eg: a batch containing buffers from request
3326 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3327 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3328 * multiple process_capture_results */
3329 metadata_buffer_t *metadata =
3330 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3331 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3332 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3333 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3334 uint32_t frame_number = 0, urgent_frame_number = 0;
3335 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3336 bool invalid_metadata = false;
3337 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3338 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003339 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003340
3341 int32_t *p_frame_number_valid =
3342 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3343 uint32_t *p_frame_number =
3344 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3345 int64_t *p_capture_time =
3346 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3347 int32_t *p_urgent_frame_number_valid =
3348 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3349 uint32_t *p_urgent_frame_number =
3350 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3351
3352 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3353 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3354 (NULL == p_urgent_frame_number)) {
3355 LOGE("Invalid metadata");
3356 invalid_metadata = true;
3357 } else {
3358 frame_number_valid = *p_frame_number_valid;
3359 last_frame_number = *p_frame_number;
3360 last_frame_capture_time = *p_capture_time;
3361 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3362 last_urgent_frame_number = *p_urgent_frame_number;
3363 }
3364
3365 /* In batch mode, when no video buffers are requested, set_parms are sent
3366 * for every capture_request. The difference between consecutive urgent
3367 * frame numbers and frame numbers should be used to interpolate the
3368 * corresponding frame numbers and time stamps */
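    /* Illustration (hypothetical 120fps batch of 4): if the batch metadata
     * reports last frame number 8 with timestamp T, the loop further below
     * emits results for frames 5, 6, 7 and 8 with interpolated timestamps
     * T - 3*(1s/120), T - 2*(1s/120), T - 1*(1s/120) and T respectively. */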
3369 pthread_mutex_lock(&mMutex);
3370 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003371 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3372 if(idx < 0) {
3373 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3374 last_urgent_frame_number);
3375 mState = ERROR;
3376 pthread_mutex_unlock(&mMutex);
3377 return;
3378 }
3379 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003380 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3381 first_urgent_frame_number;
3382
3383 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3384 urgent_frame_number_valid,
3385 first_urgent_frame_number, last_urgent_frame_number);
3386 }
3387
3388 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003389 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3390 if(idx < 0) {
3391 LOGE("Invalid frame number received: %d. Irrecoverable error",
3392 last_frame_number);
3393 mState = ERROR;
3394 pthread_mutex_unlock(&mMutex);
3395 return;
3396 }
3397 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 frameNumDiff = last_frame_number + 1 -
3399 first_frame_number;
3400 mPendingBatchMap.removeItem(last_frame_number);
3401
3402 LOGD("frm: valid: %d frm_num: %d - %d",
3403 frame_number_valid,
3404 first_frame_number, last_frame_number);
3405
3406 }
3407 pthread_mutex_unlock(&mMutex);
3408
3409 if (urgent_frame_number_valid || frame_number_valid) {
3410 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3411 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3412 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3413 urgentFrameNumDiff, last_urgent_frame_number);
3414 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3415 LOGE("frameNumDiff: %d frameNum: %d",
3416 frameNumDiff, last_frame_number);
3417 }
3418
3419 for (size_t i = 0; i < loopCount; i++) {
3420 /* handleMetadataWithLock is called even for invalid_metadata for
3421 * pipeline depth calculation */
3422 if (!invalid_metadata) {
3423 /* Infer frame number. Batch metadata contains frame number of the
3424 * last frame */
3425 if (urgent_frame_number_valid) {
3426 if (i < urgentFrameNumDiff) {
3427 urgent_frame_number =
3428 first_urgent_frame_number + i;
3429 LOGD("inferred urgent frame_number: %d",
3430 urgent_frame_number);
3431 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3432 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3433 } else {
3434 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3435 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3436 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3437 }
3438 }
3439
3440 /* Infer frame number. Batch metadata contains frame number of the
3441 * last frame */
3442 if (frame_number_valid) {
3443 if (i < frameNumDiff) {
3444 frame_number = first_frame_number + i;
3445 LOGD("inferred frame_number: %d", frame_number);
3446 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3447 CAM_INTF_META_FRAME_NUMBER, frame_number);
3448 } else {
3449 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3452 }
3453 }
3454
3455 if (last_frame_capture_time) {
3456 //Infer timestamp
3457 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003458 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003460 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003461 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3462 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3463 LOGD("batch capture_time: %lld, capture_time: %lld",
3464 last_frame_capture_time, capture_time);
3465 }
3466 }
3467 pthread_mutex_lock(&mMutex);
3468 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003469 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003470 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3471 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003472 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003473 pthread_mutex_unlock(&mMutex);
3474 }
3475
3476 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003477 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 mMetadataChannel->bufDone(metadata_buf);
3479 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003480 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 }
3482}
3483
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003484void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3485 camera3_error_msg_code_t errorCode)
3486{
3487 camera3_notify_msg_t notify_msg;
3488 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3489 notify_msg.type = CAMERA3_MSG_ERROR;
3490 notify_msg.message.error.error_code = errorCode;
3491 notify_msg.message.error.error_stream = NULL;
3492 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003493 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494
3495 return;
3496}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003497
3498/*===========================================================================
3499 * FUNCTION : sendPartialMetadataWithLock
3500 *
3501 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3502 *
3503 * PARAMETERS : @metadata: metadata buffer
3504 * @requestIter: The iterator for the pending capture request for
3505 * which the partial result is being sen
3506 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3507 * last urgent metadata in a batch. Always true for non-batch mode
3508 *
3509 * RETURN :
3510 *
3511 *==========================================================================*/
3512
3513void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3514 metadata_buffer_t *metadata,
3515 const pendingRequestIterator requestIter,
3516 bool lastUrgentMetadataInBatch)
3517{
3518 camera3_capture_result_t result;
3519 memset(&result, 0, sizeof(camera3_capture_result_t));
3520
3521 requestIter->partial_result_cnt++;
3522
3523 // Extract 3A metadata
3524 result.result = translateCbUrgentMetadataToResultMetadata(
3525 metadata, lastUrgentMetadataInBatch);
3526 // Populate metadata result
3527 result.frame_number = requestIter->frame_number;
3528 result.num_output_buffers = 0;
3529 result.output_buffers = NULL;
3530 result.partial_result = requestIter->partial_result_cnt;
3531
3532 {
3533 Mutex::Autolock l(gHdrPlusClientLock);
3534 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3535 // Notify HDR+ client about the partial metadata.
3536 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3537 result.partial_result == PARTIAL_RESULT_COUNT);
3538 }
3539 }
3540
3541 orchestrateResult(&result);
3542 LOGD("urgent frame_number = %u", result.frame_number);
3543 free_camera_metadata((camera_metadata_t *)result.result);
3544}
3545
Thierry Strudel3d639192016-09-09 11:52:26 -07003546/*===========================================================================
3547 * FUNCTION : handleMetadataWithLock
3548 *
3549 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3550 *
3551 * PARAMETERS : @metadata_buf: metadata buffer
3552 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3553 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003554 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3555 * last urgent metadata in a batch. Always true for non-batch mode
3556 * @lastMetadataInBatch: Boolean to indicate whether this is the
3557 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003558 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3559 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003560 *
3561 * RETURN :
3562 *
3563 *==========================================================================*/
3564void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003565 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003566 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3567 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003568{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003569 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003570 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3571 //during flush do not send metadata from this thread
3572 LOGD("not sending metadata during flush or when mState is error");
3573 if (free_and_bufdone_meta_buf) {
3574 mMetadataChannel->bufDone(metadata_buf);
3575 free(metadata_buf);
3576 }
3577 return;
3578 }
3579
3580 //not in flush
3581 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3582 int32_t frame_number_valid, urgent_frame_number_valid;
3583 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003584 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003585 nsecs_t currentSysTime;
3586
3587 int32_t *p_frame_number_valid =
3588 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3589 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3590 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003591 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 int32_t *p_urgent_frame_number_valid =
3593 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3594 uint32_t *p_urgent_frame_number =
3595 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3596 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3597 metadata) {
3598 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3599 *p_frame_number_valid, *p_frame_number);
3600 }
3601
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 camera_metadata_t *resultMetadata = nullptr;
3603
Thierry Strudel3d639192016-09-09 11:52:26 -07003604 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3605 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3606 LOGE("Invalid metadata");
3607 if (free_and_bufdone_meta_buf) {
3608 mMetadataChannel->bufDone(metadata_buf);
3609 free(metadata_buf);
3610 }
3611 goto done_metadata;
3612 }
3613 frame_number_valid = *p_frame_number_valid;
3614 frame_number = *p_frame_number;
3615 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003616 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003617 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3618 urgent_frame_number = *p_urgent_frame_number;
3619 currentSysTime = systemTime(CLOCK_MONOTONIC);
3620
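    // When the platform reports uncalibrated timestamps, the block below measures
    // the current CLOCK_BOOTTIME minus CLOCK_MONOTONIC offset (keeping the sample
    // with the smallest read gap over three tries) and subtracts it from
    // capture_time, effectively shifting the sensor timestamp from the BOOTTIME
    // base to the MONOTONIC base used elsewhere in this function.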
Jason Lee603176d2017-05-31 11:43:27 -07003621 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3622 const int tries = 3;
3623 nsecs_t bestGap, measured;
3624 for (int i = 0; i < tries; ++i) {
3625 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3626 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3627 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3628 const nsecs_t gap = tmono2 - tmono;
3629 if (i == 0 || gap < bestGap) {
3630 bestGap = gap;
3631 measured = tbase - ((tmono + tmono2) >> 1);
3632 }
3633 }
3634 capture_time -= measured;
3635 }
3636
Thierry Strudel3d639192016-09-09 11:52:26 -07003637 // Detect if buffers from any requests are overdue
3638 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003639 int64_t timeout;
3640 {
3641 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3642 // If there is a pending HDR+ request, the following requests may be blocked until the
3643 // HDR+ request is done. So allow a longer timeout.
3644 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3645 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3646 }
3647
3648 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003649 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003650 assert(missed.stream->priv);
3651 if (missed.stream->priv) {
3652 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3653 assert(ch->mStreams[0]);
3654 if (ch->mStreams[0]) {
3655 LOGE("Cancel missing frame = %d, buffer = %p,"
3656 "stream type = %d, stream format = %d",
3657 req.frame_number, missed.buffer,
3658 ch->mStreams[0]->getMyType(), missed.stream->format);
3659 ch->timeoutFrame(req.frame_number);
3660 }
3661 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003662 }
3663 }
3664 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003665 //For the very first metadata callback, regardless of whether it contains a valid
3666 //frame number, send the partial metadata for the jumpstarting requests.
3667 //Note that this has to be done even if the metadata doesn't contain valid
3668 //urgent frame number, because in the case only 1 request is ever submitted
3669 //to HAL, there won't be subsequent valid urgent frame number.
3670 if (mFirstMetadataCallback) {
3671 for (pendingRequestIterator i =
3672 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3673 if (i->bUseFirstPartial) {
3674 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3675 }
3676 }
3677 mFirstMetadataCallback = false;
3678 }
3679
Thierry Strudel3d639192016-09-09 11:52:26 -07003680 //Partial result on process_capture_result for timestamp
3681 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003682 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003683
3684 //Recieved an urgent Frame Number, handle it
3685 //using partial results
3686 for (pendingRequestIterator i =
3687 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3688 LOGD("Iterator Frame = %d urgent frame = %d",
3689 i->frame_number, urgent_frame_number);
3690
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003691 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 (i->partial_result_cnt == 0)) {
3693 LOGE("Error: HAL missed urgent metadata for frame number %d",
3694 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003695 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003696 }
3697
3698 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003699 i->partial_result_cnt == 0) {
3700 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003701 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3702 // Instant AEC settled for this frame.
3703 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3704 mInstantAECSettledFrameNumber = urgent_frame_number;
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 break;
3707 }
3708 }
3709 }
3710
3711 if (!frame_number_valid) {
3712 LOGD("Not a valid normal frame number, used as SOF only");
3713 if (free_and_bufdone_meta_buf) {
3714 mMetadataChannel->bufDone(metadata_buf);
3715 free(metadata_buf);
3716 }
3717 goto done_metadata;
3718 }
3719 LOGH("valid frame_number = %u, capture_time = %lld",
3720 frame_number, capture_time);
3721
Emilian Peev4e0fe952017-06-30 12:40:09 -07003722 handleDepthDataLocked(metadata->depth_data, frame_number,
3723 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003724
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003725 // Check whether any stream buffer corresponding to this is dropped or not
3726 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3727 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3728 for (auto & pendingRequest : mPendingRequestsList) {
3729 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3730 mInstantAECSettledFrameNumber)) {
3731 camera3_notify_msg_t notify_msg = {};
3732 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003733 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734 QCamera3ProcessingChannel *channel =
3735 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003736 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003737 if (p_cam_frame_drop) {
3738 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003739 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003740 // Got the stream ID for drop frame.
3741 dropFrame = true;
3742 break;
3743 }
3744 }
3745 } else {
3746 // This is instant AEC case.
3747 // For instant AEC, drop the stream until AEC is settled.
3748 dropFrame = true;
3749 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003750
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003751 if (dropFrame) {
3752 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3753 if (p_cam_frame_drop) {
3754 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003755 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003757 } else {
3758 // For instant AEC, inform frame drop and frame number
3759 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3760 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 pendingRequest.frame_number, streamID,
3762 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 }
3764 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003766 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003767 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003768 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 if (p_cam_frame_drop) {
3770 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003771 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003773 } else {
3774 // For instant AEC, inform frame drop and frame number
3775 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3776 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777 pendingRequest.frame_number, streamID,
3778 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 }
3780 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003782 PendingFrameDrop.stream_ID = streamID;
3783 // Add the Frame drop info to mPendingFrameDropList
3784 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003786 }
3787 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003789
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003790 for (auto & pendingRequest : mPendingRequestsList) {
3791 // Find the pending request with the frame number.
3792 if (pendingRequest.frame_number == frame_number) {
3793 // Update the sensor timestamp.
3794 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003795
Thierry Strudel3d639192016-09-09 11:52:26 -07003796
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003797 /* Set the timestamp in display metadata so that clients aware of
3798 private_handle, such as VT, can use these unmodified timestamps.
3799 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003800 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003801
Thierry Strudel3d639192016-09-09 11:52:26 -07003802 // Find channel requiring metadata, meaning internal offline postprocess
3803 // is needed.
3804 //TODO: for now, we don't support two streams requiring metadata at the same time.
3805 // (because we are not making copies, and the metadata buffer is not reference counted.)
3806 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3808 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003809 if (iter->need_metadata) {
3810 internalPproc = true;
3811 QCamera3ProcessingChannel *channel =
3812 (QCamera3ProcessingChannel *)iter->stream->priv;
3813 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003814 if(p_is_metabuf_queued != NULL) {
3815 *p_is_metabuf_queued = true;
3816 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003817 break;
3818 }
3819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003820 for (auto itr = pendingRequest.internalRequestList.begin();
3821 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003822 if (itr->need_metadata) {
3823 internalPproc = true;
3824 QCamera3ProcessingChannel *channel =
3825 (QCamera3ProcessingChannel *)itr->stream->priv;
3826 channel->queueReprocMetadata(metadata_buf);
3827 break;
3828 }
3829 }
3830
Thierry Strudel54dc9782017-02-15 12:12:10 -08003831 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003832
3833 bool *enableZsl = nullptr;
3834 if (gExposeEnableZslKey) {
3835 enableZsl = &pendingRequest.enableZsl;
3836 }
3837
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003838 resultMetadata = translateFromHalMetadata(metadata,
3839 pendingRequest.timestamp, pendingRequest.request_id,
3840 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3841 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003842 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003843 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003845 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003846 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003847 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003848
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003849 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003850
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003851 if (pendingRequest.blob_request) {
3852 //Dump tuning metadata if enabled and available
3853 char prop[PROPERTY_VALUE_MAX];
3854 memset(prop, 0, sizeof(prop));
3855 property_get("persist.camera.dumpmetadata", prop, "0");
3856 int32_t enabled = atoi(prop);
3857 if (enabled && metadata->is_tuning_params_valid) {
3858 dumpMetadataToFile(metadata->tuning_params,
3859 mMetaFrameCount,
3860 enabled,
3861 "Snapshot",
3862 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 }
3864 }
3865
3866 if (!internalPproc) {
3867 LOGD("couldn't find need_metadata for this metadata");
3868 // Return metadata buffer
3869 if (free_and_bufdone_meta_buf) {
3870 mMetadataChannel->bufDone(metadata_buf);
3871 free(metadata_buf);
3872 }
3873 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003874
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003875 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003876 }
3877 }
3878
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003879 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3880
3881 // Try to send out capture result metadata.
3882 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003883 return;
3884
Thierry Strudel3d639192016-09-09 11:52:26 -07003885done_metadata:
3886 for (pendingRequestIterator i = mPendingRequestsList.begin();
3887 i != mPendingRequestsList.end() ;i++) {
3888 i->pipeline_depth++;
3889 }
3890 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3891 unblockRequestIfNecessary();
3892}
3893
3894/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003895 * FUNCTION : handleDepthDataLocked
3896 *
3897 * DESCRIPTION: Handles incoming depth data
3898 *
3899 * PARAMETERS : @depthData : Depth data
3900 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003901 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003902 *
3903 * RETURN :
3904 *
3905 *==========================================================================*/
3906void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003907 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003908 uint32_t currentFrameNumber;
3909 buffer_handle_t *depthBuffer;
3910
3911 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003912 return;
3913 }
3914
3915 camera3_stream_buffer_t resultBuffer =
3916 {.acquire_fence = -1,
3917 .release_fence = -1,
3918 .status = CAMERA3_BUFFER_STATUS_OK,
3919 .buffer = nullptr,
3920 .stream = mDepthChannel->getStream()};
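// Walk the depth channel's queued buffers in frame order: frames older than the
// incoming one are returned with a buffer error (plus an error notify), the
// matching frame is populated with the depth data when it is valid, and newer
// frames stay queued for later results.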
Emilian Peev7650c122017-01-19 08:24:33 -08003921 do {
3922 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3923 if (nullptr == depthBuffer) {
3924 break;
3925 }
3926
Emilian Peev7650c122017-01-19 08:24:33 -08003927 resultBuffer.buffer = depthBuffer;
3928 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003929 if (valid) {
3930 int32_t rc = mDepthChannel->populateDepthData(depthData,
3931 frameNumber);
3932 if (NO_ERROR != rc) {
3933 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3934 } else {
3935 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3936 }
Emilian Peev7650c122017-01-19 08:24:33 -08003937 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003938 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003939 }
3940 } else if (currentFrameNumber > frameNumber) {
3941 break;
3942 } else {
3943 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3944 {{currentFrameNumber, mDepthChannel->getStream(),
3945 CAMERA3_MSG_ERROR_BUFFER}}};
3946 orchestrateNotify(&notify_msg);
3947
3948 LOGE("Depth buffer for frame number: %d is missing "
3949 "returning back!", currentFrameNumber);
3950 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3951 }
3952 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003953 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003954 } while (currentFrameNumber < frameNumber);
3955}
3956
3957/*===========================================================================
3958 * FUNCTION : notifyErrorFoPendingDepthData
3959 *
3960 * DESCRIPTION: Returns error for any pending depth buffers
3961 *
3962 * PARAMETERS : depthCh - depth channel that needs to get flushed
3963 *
3964 * RETURN :
3965 *
3966 *==========================================================================*/
3967void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3968 QCamera3DepthChannel *depthCh) {
3969 uint32_t currentFrameNumber;
3970 buffer_handle_t *depthBuffer;
3971
3972 if (nullptr == depthCh) {
3973 return;
3974 }
3975
3976 camera3_notify_msg_t notify_msg =
3977 {.type = CAMERA3_MSG_ERROR,
3978 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3979 camera3_stream_buffer_t resultBuffer =
3980 {.acquire_fence = -1,
3981 .release_fence = -1,
3982 .buffer = nullptr,
3983 .stream = depthCh->getStream(),
3984 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003985
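// Drain every depth buffer still queued in the channel, issuing a buffer error
// notify and returning the buffer with an error status for each pending frame.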
3986 while (nullptr !=
3987 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3988 depthCh->unmapBuffer(currentFrameNumber);
3989
3990 notify_msg.message.error.frame_number = currentFrameNumber;
3991 orchestrateNotify(&notify_msg);
3992
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003993 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003994 };
3995}
3996
3997/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003998 * FUNCTION : hdrPlusPerfLock
3999 *
4000 * DESCRIPTION: perf lock for HDR+ using custom intent
4001 *
4002 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4003 *
4004 * RETURN : None
4005 *
4006 *==========================================================================*/
4007void QCamera3HardwareInterface::hdrPlusPerfLock(
4008 mm_camera_super_buf_t *metadata_buf)
4009{
4010 if (NULL == metadata_buf) {
4011 LOGE("metadata_buf is NULL");
4012 return;
4013 }
4014 metadata_buffer_t *metadata =
4015 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4016 int32_t *p_frame_number_valid =
4017 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4018 uint32_t *p_frame_number =
4019 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4020
4021 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4022 LOGE("%s: Invalid metadata", __func__);
4023 return;
4024 }
4025
4026 //acquire perf lock for 5 sec after the last HDR frame is captured
4027 //(the NULL checks above already guarantee both pointers are valid)
4028 if (*p_frame_number_valid &&
4029 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004030 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004031 }
4032
Thierry Strudel3d639192016-09-09 11:52:26 -07004033}
4034
4035/*===========================================================================
4036 * FUNCTION : handleInputBufferWithLock
4037 *
4038 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4039 *
4040 * PARAMETERS : @frame_number: frame number of the input buffer
4041 *
4042 * RETURN :
4043 *
4044 *==========================================================================*/
4045void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4046{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004047 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 pendingRequestIterator i = mPendingRequestsList.begin();
4049 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4050 i++;
4051 }
4052 if (i != mPendingRequestsList.end() && i->input_buffer) {
4053 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004054 CameraMetadata settings;
4055 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4056 if(i->settings) {
4057 settings = i->settings;
4058 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4059 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004060 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004061 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004062 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004063 } else {
4064 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 }
4066
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004067 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4068 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4069 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004070
4071 camera3_capture_result result;
4072 memset(&result, 0, sizeof(camera3_capture_result));
4073 result.frame_number = frame_number;
4074 result.result = i->settings;
4075 result.input_buffer = i->input_buffer;
4076 result.partial_result = PARTIAL_RESULT_COUNT;
4077
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004078 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004079 LOGD("Input request metadata and input buffer frame_number = %u",
4080 i->frame_number);
4081 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004082
4083 // Dispatch result metadata that may be just unblocked by this reprocess result.
4084 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004085 } else {
4086 LOGE("Could not find input request for frame number %d", frame_number);
4087 }
4088}
4089
4090/*===========================================================================
4091 * FUNCTION : handleBufferWithLock
4092 *
4093 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4094 *
4095 * PARAMETERS : @buffer: image buffer for the callback
4096 * @frame_number: frame number of the image buffer
4097 *
4098 * RETURN :
4099 *
4100 *==========================================================================*/
4101void QCamera3HardwareInterface::handleBufferWithLock(
4102 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4103{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004104 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004105
4106 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4107 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4108 }
4109
Thierry Strudel3d639192016-09-09 11:52:26 -07004110 /* Nothing to be done during error state */
4111 if ((ERROR == mState) || (DEINIT == mState)) {
4112 return;
4113 }
4114 if (mFlushPerf) {
4115 handleBuffersDuringFlushLock(buffer);
4116 return;
4117 }
4118 //not in flush
4119 // If the frame number doesn't exist in the pending request list,
4120 // directly send the buffer to the frameworks, and update pending buffers map
4121 // Otherwise, book-keep the buffer.
4122 pendingRequestIterator i = mPendingRequestsList.begin();
4123 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4124 i++;
4125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004127 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004128 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004129 // For a reprocessing request, try to send out result metadata.
4130 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004132 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004133
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004134 // Check if this frame was dropped.
4135 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4136 m != mPendingFrameDropList.end(); m++) {
4137 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4138 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4139 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4140 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4141 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4142 frame_number, streamID);
4143 m = mPendingFrameDropList.erase(m);
4144 break;
4145 }
4146 }
4147
4148 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4149 LOGH("result frame_number = %d, buffer = %p",
4150 frame_number, buffer->buffer);
4151
4152 mPendingBuffersMap.removeBuf(buffer->buffer);
4153 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4154
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004155 if (mPreviewStarted == false) {
4156 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4157 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004158 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4159
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004160 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4161 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4162 mPreviewStarted = true;
4163
4164 // Set power hint for preview
4165 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4166 }
4167 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004168}
4169
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004170void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 const camera_metadata_t *resultMetadata)
4172{
4173 // Find the pending request for this result metadata.
4174 auto requestIter = mPendingRequestsList.begin();
4175 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4176 requestIter++;
4177 }
4178
4179 if (requestIter == mPendingRequestsList.end()) {
4180 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4181 return;
4182 }
4183
4184 // Update the result metadata
4185 requestIter->resultMetadata = resultMetadata;
4186
4187 // Check what type of request this is.
4188 bool liveRequest = false;
4189 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004190 // HDR+ request doesn't have partial results.
4191 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004192 } else if (requestIter->input_buffer != nullptr) {
4193 // Reprocessing request result is the same as settings.
4194 requestIter->resultMetadata = requestIter->settings;
4195 // Reprocessing request doesn't have partial results.
4196 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4197 } else {
4198 liveRequest = true;
4199 requestIter->partial_result_cnt++;
4200 mPendingLiveRequest--;
4201
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004202 {
4203 Mutex::Autolock l(gHdrPlusClientLock);
4204 // For a live request, send the metadata to HDR+ client.
4205 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4206 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4207 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4208 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004209 }
4210 }
4211
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004212 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4213}
4214
4215void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4216 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004217 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4218 // to be sent if all previous pending requests are ready to be sent.
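// Illustrative example: if frames 10, 11 and 12 are pending and frame 12's
// metadata arrives before frame 11's, frame 12 is held back until 10 and 11
// have been sent, keeping shutter callbacks and results in frame-number order.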
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004219 bool readyToSend = true;
4220
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004221 // Iterate through the pending requests to send out result metadata that are ready. Also if
4222 // this result metadata belongs to a live request, notify errors for previous live requests
4223 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004224 auto iter = mPendingRequestsList.begin();
4225 while (iter != mPendingRequestsList.end()) {
4226 // Check if current pending request is ready. If it's not ready, the following pending
4227 // requests are also not ready.
4228 if (readyToSend && iter->resultMetadata == nullptr) {
4229 readyToSend = false;
4230 }
4231
4232 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 camera3_capture_result_t result = {};
4235 result.frame_number = iter->frame_number;
4236 result.result = iter->resultMetadata;
4237 result.partial_result = iter->partial_result_cnt;
4238
4239 // If this pending buffer has result metadata, we may be able to send out shutter callback
4240 // and result metadata.
4241 if (iter->resultMetadata != nullptr) {
4242 if (!readyToSend) {
4243 // If any of the previous pending request is not ready, this pending request is
4244 // also not ready to send in order to keep shutter callbacks and result metadata
4245 // in order.
4246 iter++;
4247 continue;
4248 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004249 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004250 // If the result metadata belongs to a live request, notify errors for previous pending
4251 // live requests.
4252 mPendingLiveRequest--;
4253
4254 CameraMetadata dummyMetadata;
4255 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4256 result.result = dummyMetadata.release();
4257
4258 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004259
4260 // partial_result should be PARTIAL_RESULT_CNT in case of
4261 // ERROR_RESULT.
4262 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4263 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004264 } else {
4265 iter++;
4266 continue;
4267 }
4268
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004269 result.output_buffers = nullptr;
4270 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004271 orchestrateResult(&result);
4272
4273 // For reprocessing, result metadata is the same as settings so do not free it here to
4274 // avoid double free.
4275 if (result.result != iter->settings) {
4276 free_camera_metadata((camera_metadata_t *)result.result);
4277 }
4278 iter->resultMetadata = nullptr;
4279 iter = erasePendingRequest(iter);
4280 }
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004283 for (auto &iter : mPendingRequestsList) {
4284 // Increment pipeline depth for the following pending requests.
4285 if (iter.frame_number > frameNumber) {
4286 iter.pipeline_depth++;
4287 }
4288 }
4289 }
4290
4291 unblockRequestIfNecessary();
4292}
4293
Thierry Strudel3d639192016-09-09 11:52:26 -07004294/*===========================================================================
4295 * FUNCTION : unblockRequestIfNecessary
4296 *
4297 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4298 * that mMutex is held when this function is called.
4299 *
4300 * PARAMETERS :
4301 *
4302 * RETURN :
4303 *
4304 *==========================================================================*/
4305void QCamera3HardwareInterface::unblockRequestIfNecessary()
4306{
4307 // Unblock process_capture_request
4308 pthread_cond_signal(&mRequestCond);
4309}
4310
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004311/*===========================================================================
4312 * FUNCTION : isHdrSnapshotRequest
4313 *
4314 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4315 *
4316 * PARAMETERS : camera3 request structure
4317 *
4318 * RETURN : boolean decision variable
4319 *
4320 *==========================================================================*/
4321bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4322{
4323 if (request == NULL) {
4324 LOGE("Invalid request handle");
4325 assert(0);
4326 return false;
4327 }
4328
4329 if (!mForceHdrSnapshot) {
4330 CameraMetadata frame_settings;
4331 frame_settings = request->settings;
4332
4333 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4334 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4335 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4336 return false;
4337 }
4338 } else {
4339 return false;
4340 }
4341
4342 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4343 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4344 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4345 return false;
4346 }
4347 } else {
4348 return false;
4349 }
4350 }
4351
4352 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4353 if (request->output_buffers[i].stream->format
4354 == HAL_PIXEL_FORMAT_BLOB) {
4355 return true;
4356 }
4357 }
4358
4359 return false;
4360}
4361/*===========================================================================
4362 * FUNCTION : orchestrateRequest
4363 *
4364 * DESCRIPTION: Orchestrates a capture request from camera service
4365 *
4366 * PARAMETERS :
4367 * @request : request from framework to process
4368 *
4369 * RETURN : Error status codes
4370 *
4371 *==========================================================================*/
4372int32_t QCamera3HardwareInterface::orchestrateRequest(
4373 camera3_capture_request_t *request)
4374{
4375
4376 uint32_t originalFrameNumber = request->frame_number;
4377 uint32_t originalOutputCount = request->num_output_buffers;
4378 const camera_metadata_t *original_settings = request->settings;
4379 List<InternalRequest> internallyRequestedStreams;
4380 List<InternalRequest> emptyInternalList;
4381
4382 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4383 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4384 uint32_t internalFrameNumber;
4385 CameraMetadata modified_meta;
4386
4387
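/* Overview of the sequence below: the single framework HDR snapshot is expanded
 * into a series of internal captures with AE locked -- metering-only settling
 * frames and full captures at the -2x, 0x and +2x steps noted in the comments
 * below -- each tracked by its own internal frame number so that only the
 * framework-visible result is reported back, and the original settings pointer
 * is restored at the end. */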
4388 /* Add Blob channel to list of internally requested streams */
4389 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4390 if (request->output_buffers[i].stream->format
4391 == HAL_PIXEL_FORMAT_BLOB) {
4392 InternalRequest streamRequested;
4393 streamRequested.meteringOnly = 1;
4394 streamRequested.need_metadata = 0;
4395 streamRequested.stream = request->output_buffers[i].stream;
4396 internallyRequestedStreams.push_back(streamRequested);
4397 }
4398 }
4399 request->num_output_buffers = 0;
4400 auto itr = internallyRequestedStreams.begin();
4401
4402 /* Modify setting to set compensation */
4403 modified_meta = request->settings;
4404 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4405 uint8_t aeLock = 1;
4406 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4407 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4408 camera_metadata_t *modified_settings = modified_meta.release();
4409 request->settings = modified_settings;
4410
4411 /* Capture Settling & -2x frame */
4412 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4413 request->frame_number = internalFrameNumber;
4414 processCaptureRequest(request, internallyRequestedStreams);
4415
4416 request->num_output_buffers = originalOutputCount;
4417 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4418 request->frame_number = internalFrameNumber;
4419 processCaptureRequest(request, emptyInternalList);
4420 request->num_output_buffers = 0;
4421
4422 modified_meta = modified_settings;
4423 expCompensation = 0;
4424 aeLock = 1;
4425 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4426 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4427 modified_settings = modified_meta.release();
4428 request->settings = modified_settings;
4429
4430 /* Capture Settling & 0X frame */
4431
4432 itr = internallyRequestedStreams.begin();
4433 if (itr == internallyRequestedStreams.end()) {
4434 LOGE("Error Internally Requested Stream list is empty");
4435 assert(0);
4436 } else {
4437 itr->need_metadata = 0;
4438 itr->meteringOnly = 1;
4439 }
4440
4441 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4442 request->frame_number = internalFrameNumber;
4443 processCaptureRequest(request, internallyRequestedStreams);
4444
4445 itr = internallyRequestedStreams.begin();
4446 if (itr == internallyRequestedStreams.end()) {
4447 ALOGE("Error Internally Requested Stream list is empty");
4448 assert(0);
4449 } else {
4450 itr->need_metadata = 1;
4451 itr->meteringOnly = 0;
4452 }
4453
4454 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4455 request->frame_number = internalFrameNumber;
4456 processCaptureRequest(request, internallyRequestedStreams);
4457
4458 /* Capture 2X frame*/
4459 modified_meta = modified_settings;
4460 expCompensation = GB_HDR_2X_STEP_EV;
4461 aeLock = 1;
4462 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4463 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4464 modified_settings = modified_meta.release();
4465 request->settings = modified_settings;
4466
4467 itr = internallyRequestedStreams.begin();
4468 if (itr == internallyRequestedStreams.end()) {
4469 ALOGE("Error Internally Requested Stream list is empty");
4470 assert(0);
4471 } else {
4472 itr->need_metadata = 0;
4473 itr->meteringOnly = 1;
4474 }
4475 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4476 request->frame_number = internalFrameNumber;
4477 processCaptureRequest(request, internallyRequestedStreams);
4478
4479 itr = internallyRequestedStreams.begin();
4480 if (itr == internallyRequestedStreams.end()) {
4481 ALOGE("Error Internally Requested Stream list is empty");
4482 assert(0);
4483 } else {
4484 itr->need_metadata = 1;
4485 itr->meteringOnly = 0;
4486 }
4487
4488 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4489 request->frame_number = internalFrameNumber;
4490 processCaptureRequest(request, internallyRequestedStreams);
4491
4492
4493 /* Capture 2X on original streaming config*/
4494 internallyRequestedStreams.clear();
4495
4496 /* Restore original settings pointer */
4497 request->settings = original_settings;
4498 } else {
4499 uint32_t internalFrameNumber;
4500 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4501 request->frame_number = internalFrameNumber;
4502 return processCaptureRequest(request, internallyRequestedStreams);
4503 }
4504
4505 return NO_ERROR;
4506}
4507
4508/*===========================================================================
4509 * FUNCTION : orchestrateResult
4510 *
4511 * DESCRIPTION: Orchestrates a capture result to camera service
4512 *
4513 * PARAMETERS :
4514 * @result : capture result to send to camera service
4515 *
4516 * RETURN :
4517 *
4518 *==========================================================================*/
4519void QCamera3HardwareInterface::orchestrateResult(
4520 camera3_capture_result_t *result)
4521{
4522 uint32_t frameworkFrameNumber;
4523 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4524 frameworkFrameNumber);
4525 if (rc != NO_ERROR) {
4526 LOGE("Cannot find translated frameworkFrameNumber");
4527 assert(0);
4528 } else {
4529 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004530 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004531 } else {
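// If the result carries ANDROID_SYNC_FRAME_NUMBER, rewrite it with the
// framework-visible frame number before forwarding the result.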
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004532 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004533 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4534 camera_metadata_entry_t entry;
4535 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4536 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004537 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004538 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4539 if (ret != OK)
4540 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004541 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004542 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004543 result->frame_number = frameworkFrameNumber;
4544 mCallbackOps->process_capture_result(mCallbackOps, result);
4545 }
4546 }
4547}
4548
4549/*===========================================================================
4550 * FUNCTION : orchestrateNotify
4551 *
4552 * DESCRIPTION: Orchestrates a notify to camera service
4553 *
4554 * PARAMETERS :
4555 * @notify_msg : notify message to send to camera service
4556 *
4557 * RETURN :
4558 *
4559 *==========================================================================*/
4560void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4561{
4562 uint32_t frameworkFrameNumber;
4563 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004564 int32_t rc = NO_ERROR;
4565
4566 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004568
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004569 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004570 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4571 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4572 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004573 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004574 LOGE("Cannot find translated frameworkFrameNumber");
4575 assert(0);
4576 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004577 }
4578 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004579
4580 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4581 LOGD("Internal Request drop the notifyCb");
4582 } else {
4583 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4584 mCallbackOps->notify(mCallbackOps, notify_msg);
4585 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004586}
4587
4588/*===========================================================================
4589 * FUNCTION : FrameNumberRegistry
4590 *
4591 * DESCRIPTION: Constructor
4592 *
4593 * PARAMETERS :
4594 *
4595 * RETURN :
4596 *
4597 *==========================================================================*/
4598FrameNumberRegistry::FrameNumberRegistry()
4599{
4600 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4601}
4602
4603/*===========================================================================
4604 * FUNCTION : ~FrameNumberRegistry
4605 *
4606 * DESCRIPTION: Destructor
4607 *
4608 * PARAMETERS :
4609 *
4610 * RETURN :
4611 *
4612 *==========================================================================*/
4613FrameNumberRegistry::~FrameNumberRegistry()
4614{
4615}
4616
4617/*===========================================================================
4618 * FUNCTION : purgeOldEntriesLocked
4619 *
4620 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4621 *
4622 * PARAMETERS :
4623 *
4624 * RETURN : NONE
4625 *
4626 *==========================================================================*/
4627void FrameNumberRegistry::purgeOldEntriesLocked()
4628{
4629 while (_register.begin() != _register.end()) {
4630 auto itr = _register.begin();
4631 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4632 _register.erase(itr);
4633 } else {
4634 return;
4635 }
4636 }
4637}
4638
4639/*===========================================================================
4640 * FUNCTION : allocStoreInternalFrameNumber
4641 *
4642 * DESCRIPTION: Method to note down a framework request and associate a new
4643 * internal request number against it
4644 *
4645 * PARAMETERS :
4646 * @fFrameNumber: Identifier given by framework
4647 * @internalFN : Output parameter which will have the newly generated internal
4648 * frame number
4649 *
4650 * RETURN : Error code
4651 *
4652 *==========================================================================*/
4653int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4654 uint32_t &internalFrameNumber)
4655{
4656 Mutex::Autolock lock(mRegistryLock);
4657 internalFrameNumber = _nextFreeInternalNumber++;
4658 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4659 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4660 purgeOldEntriesLocked();
4661 return NO_ERROR;
4662}
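// Illustrative flow (hypothetical numbers): a framework request with frame number
// 100 might be stored against internal number 1000100; results carrying the
// internal number are translated back to 100 in orchestrateResult() and
// orchestrateNotify(), while purely internal captures map to
// EMPTY_FRAMEWORK_FRAME_NUMBER and are dropped there.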
4663
4664/*===========================================================================
4665 * FUNCTION : generateStoreInternalFrameNumber
4666 *
4667 * DESCRIPTION: Method to associate a new internal request number independent
4668 * of any association with framework requests
4669 *
4670 * PARAMETERS :
4671 * @internalFrame#: Output parameter which will have the newly generated
4672 * internal frame number
4673 *
4674 * RETURN : Error code
4675 *
4676 *==========================================================================*/
4677int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4678{
4679 Mutex::Autolock lock(mRegistryLock);
4680 internalFrameNumber = _nextFreeInternalNumber++;
4681 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4682 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4683 purgeOldEntriesLocked();
4684 return NO_ERROR;
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : getFrameworkFrameNumber
4689 *
4690 * DESCRIPTION: Method to query the framework framenumber given an internal #
4691 *
4692 * PARAMETERS :
4693 * @internalFrame#: Internal reference
4694 * @frameworkframenumber: Output parameter holding framework frame entry
4695 *
4696 * RETURN : Error code
4697 *
4698 *==========================================================================*/
4699int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4700 uint32_t &frameworkFrameNumber)
4701{
4702 Mutex::Autolock lock(mRegistryLock);
4703 auto itr = _register.find(internalFrameNumber);
4704 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004705 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004706 return -ENOENT;
4707 }
4708
4709 frameworkFrameNumber = itr->second;
4710 purgeOldEntriesLocked();
4711 return NO_ERROR;
4712}
Thierry Strudel3d639192016-09-09 11:52:26 -07004713
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004714status_t QCamera3HardwareInterface::fillPbStreamConfig(
4715 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4716 QCamera3Channel *channel, uint32_t streamIndex) {
4717 if (config == nullptr) {
4718 LOGE("%s: config is null", __FUNCTION__);
4719 return BAD_VALUE;
4720 }
4721
4722 if (channel == nullptr) {
4723 LOGE("%s: channel is null", __FUNCTION__);
4724 return BAD_VALUE;
4725 }
4726
4727 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4728 if (stream == nullptr) {
4729 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4730 return NAME_NOT_FOUND;
4731 }
4732
4733 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4734 if (streamInfo == nullptr) {
4735 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4736 return NAME_NOT_FOUND;
4737 }
4738
4739 config->id = pbStreamId;
4740 config->image.width = streamInfo->dim.width;
4741 config->image.height = streamInfo->dim.height;
4742 config->image.padding = 0;
4743 config->image.format = pbStreamFormat;
4744
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004745 uint32_t totalPlaneSize = 0;
4746
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004747 // Fill plane information.
4748 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4749 pbcamera::PlaneConfiguration plane;
4750 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4751 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4752 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004753
4754 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004755 }
4756
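// Whatever remains of the reported frame length after accounting for all plane
// data is treated as padding in the stream configuration.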
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004757 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004758 return OK;
4759}
4760
Thierry Strudel3d639192016-09-09 11:52:26 -07004761/*===========================================================================
4762 * FUNCTION : processCaptureRequest
4763 *
4764 * DESCRIPTION: process a capture request from camera service
4765 *
4766 * PARAMETERS :
4767 * @request : request from framework to process
4768 *
4769 * RETURN :
4770 *
4771 *==========================================================================*/
4772int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004773 camera3_capture_request_t *request,
4774 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004775{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 int rc = NO_ERROR;
4778 int32_t request_id;
4779 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004780 bool isVidBufRequested = false;
4781 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004782 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
4784 pthread_mutex_lock(&mMutex);
4785
4786 // Validate current state
4787 switch (mState) {
4788 case CONFIGURED:
4789 case STARTED:
4790 /* valid state */
4791 break;
4792
4793 case ERROR:
4794 pthread_mutex_unlock(&mMutex);
4795 handleCameraDeviceError();
4796 return -ENODEV;
4797
4798 default:
4799 LOGE("Invalid state %d", mState);
4800 pthread_mutex_unlock(&mMutex);
4801 return -ENODEV;
4802 }
4803
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004804 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 if (rc != NO_ERROR) {
4806 LOGE("incoming request is not valid");
4807 pthread_mutex_unlock(&mMutex);
4808 return rc;
4809 }
4810
4811 meta = request->settings;
4812
4813 // For first capture request, send capture intent, and
4814 // stream on all streams
4815 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004816 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 // send an unconfigure to the backend so that the isp
4818 // resources are deallocated
4819 if (!mFirstConfiguration) {
4820 cam_stream_size_info_t stream_config_info;
4821 int32_t hal_version = CAM_HAL_V3;
4822 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4823 stream_config_info.buffer_info.min_buffers =
4824 MIN_INFLIGHT_REQUESTS;
4825 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004826 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004827 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004828 clear_metadata_buffer(mParameters);
4829 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4830 CAM_INTF_PARM_HAL_VERSION, hal_version);
4831 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4832 CAM_INTF_META_STREAM_INFO, stream_config_info);
4833 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4834 mParameters);
4835 if (rc < 0) {
4836 LOGE("set_parms for unconfigure failed");
4837 pthread_mutex_unlock(&mMutex);
4838 return rc;
4839 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004840
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004842 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004844 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004845 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846 property_get("persist.camera.is_type", is_type_value, "4");
4847 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4848 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4849 property_get("persist.camera.is_type_preview", is_type_value, "4");
4850 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4851 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004852
4853 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4854 int32_t hal_version = CAM_HAL_V3;
4855 uint8_t captureIntent =
4856 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4857 mCaptureIntent = captureIntent;
4858 clear_metadata_buffer(mParameters);
4859 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4860 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4861 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004862 if (mFirstConfiguration) {
4863 // configure instant AEC
4864 // Instant AEC is a session based parameter and it is needed only
4865 // once per complete session after open camera.
4866 // i.e. This is set only once for the first capture request, after open camera.
4867 setInstantAEC(meta);
4868 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869 uint8_t fwkVideoStabMode=0;
4870 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4871 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4872 }
4873
Xue Tuecac74e2017-04-17 13:58:15 -07004874 // If the EIS setprop is enabled, turn EIS on only for supported video/preview sizes and only when the AV timer is not requested
4875 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004876 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 int32_t vsMode;
4878 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4879 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4880 rc = BAD_VALUE;
4881 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 LOGD("setEis %d", setEis);
4883 bool eis3Supported = false;
4884 size_t count = IS_TYPE_MAX;
4885 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4886 for (size_t i = 0; i < count; i++) {
4887 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4888 eis3Supported = true;
4889 break;
4890 }
4891 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004892
4893 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004894 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4896 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4898 is_type = isTypePreview;
4899 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4900 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4901 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004902 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903 } else {
4904 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 } else {
4907 is_type = IS_TYPE_NONE;
4908 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004910 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4912 }
4913 }
4914
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4917
Thierry Strudel54dc9782017-02-15 12:12:10 -08004918 //Disable tintless only if the property is set to 0
4919 memset(prop, 0, sizeof(prop));
4920 property_get("persist.camera.tintless.enable", prop, "1");
4921 int32_t tintless_value = atoi(prop);
4922
Thierry Strudel3d639192016-09-09 11:52:26 -07004923 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4924 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004925
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 //Disable CDS for HFR mode or if DIS/EIS is on.
4927 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4928 //after every configure_stream
4929 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4930 (m_bIsVideo)) {
4931 int32_t cds = CAM_CDS_MODE_OFF;
4932 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4933 CAM_INTF_PARM_CDS_MODE, cds))
4934 LOGE("Failed to disable CDS for HFR mode");
4935
4936 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937
4938 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4939 uint8_t* use_av_timer = NULL;
4940
4941 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004942 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004943 use_av_timer = &m_debug_avtimer;
4944 }
4945 else{
4946 use_av_timer =
4947 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004948 if (use_av_timer) {
4949 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4950 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004951 }
4952
4953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4954 rc = BAD_VALUE;
4955 }
4956 }
4957
Thierry Strudel3d639192016-09-09 11:52:26 -07004958 setMobicat();
4959
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004960 uint8_t nrMode = 0;
4961 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4962 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4963 }
4964
Thierry Strudel3d639192016-09-09 11:52:26 -07004965 /* Set fps and hfr mode while sending meta stream info so that sensor
4966 * can configure appropriate streaming mode */
4967 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4969 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004970 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4971 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004972 if (rc == NO_ERROR) {
4973 int32_t max_fps =
4974 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004975 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004976 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4977 }
4978 /* For HFR, more buffers are dequeued upfront to improve the performance */
4979 if (mBatchSize) {
4980 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4981 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4982 }
4983 }
4984 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 LOGE("setHalFpsRange failed");
4986 }
4987 }
4988 if (meta.exists(ANDROID_CONTROL_MODE)) {
4989 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4990 rc = extractSceneMode(meta, metaMode, mParameters);
4991 if (rc != NO_ERROR) {
4992 LOGE("extractSceneMode failed");
4993 }
4994 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004995 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004996
Thierry Strudel04e026f2016-10-10 11:27:36 -07004997 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4998 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4999 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5000 rc = setVideoHdrMode(mParameters, vhdr);
5001 if (rc != NO_ERROR) {
5002 LOGE("setVideoHDR is failed");
5003 }
5004 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005006 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005007 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005008 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005009 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5011 sensorModeFullFov)) {
5012 rc = BAD_VALUE;
5013 }
5014 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005015 //TODO: validate the arguments, HSV scenemode should have only the
5016 //advertised fps ranges
5017
5018 /*set the capture intent, hal version, tintless, stream info,
5019 *and DIS enable parameters to the backend*/
5020 LOGD("set_parms META_STREAM_INFO " );
5021 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005022 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5023 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005024 mStreamConfigInfo.type[i],
5025 mStreamConfigInfo.stream_sizes[i].width,
5026 mStreamConfigInfo.stream_sizes[i].height,
5027 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005028 mStreamConfigInfo.format[i],
5029 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005030 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031
Thierry Strudel3d639192016-09-09 11:52:26 -07005032 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5033 mParameters);
5034 if (rc < 0) {
5035 LOGE("set_parms failed for hal version, stream info");
5036 }
5037
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005038 cam_sensor_mode_info_t sensorModeInfo = {};
5039 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005040 if (rc != NO_ERROR) {
5041 LOGE("Failed to get sensor output size");
5042 pthread_mutex_unlock(&mMutex);
5043 goto error_exit;
5044 }
5045
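// Map crop regions from the full active array advertised to the framework onto
// the active array of the sensor mode selected for this configuration.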
5046 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5047 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005048 sensorModeInfo.active_array_size.width,
5049 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005050
5051 /* Set batchmode before initializing channel. Since registerBuffer
5052 * internally initializes some of the channels, better set batchmode
5053 * even before first register buffer */
5054 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5055 it != mStreamInfo.end(); it++) {
5056 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5057 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5058 && mBatchSize) {
5059 rc = channel->setBatchSize(mBatchSize);
5060 //Disable per frame map unmap for HFR/batchmode case
5061 rc |= channel->setPerFrameMapUnmap(false);
5062 if (NO_ERROR != rc) {
5063 LOGE("Channel init failed %d", rc);
5064 pthread_mutex_unlock(&mMutex);
5065 goto error_exit;
5066 }
5067 }
5068 }
5069
5070 //First initialize all streams
5071 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5072 it != mStreamInfo.end(); it++) {
5073 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005074
5075 /* Initial value of NR mode is needed before stream on */
5076 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5078 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005079 setEis) {
5080 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5081 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5082 is_type = mStreamConfigInfo.is_type[i];
5083 break;
5084 }
5085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005086 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005087 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 rc = channel->initialize(IS_TYPE_NONE);
5089 }
5090 if (NO_ERROR != rc) {
5091 LOGE("Channel initialization failed %d", rc);
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095 }
5096
5097 if (mRawDumpChannel) {
5098 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5099 if (rc != NO_ERROR) {
5100 LOGE("Error: Raw Dump Channel init failed");
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005105 if (mHdrPlusRawSrcChannel) {
5106 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5107 if (rc != NO_ERROR) {
5108 LOGE("Error: HDR+ RAW Source Channel init failed");
5109 pthread_mutex_unlock(&mMutex);
5110 goto error_exit;
5111 }
5112 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 if (mSupportChannel) {
5114 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5115 if (rc < 0) {
5116 LOGE("Support channel initialization failed");
5117 pthread_mutex_unlock(&mMutex);
5118 goto error_exit;
5119 }
5120 }
5121 if (mAnalysisChannel) {
5122 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5123 if (rc < 0) {
5124 LOGE("Analysis channel initialization failed");
5125 pthread_mutex_unlock(&mMutex);
5126 goto error_exit;
5127 }
5128 }
5129 if (mDummyBatchChannel) {
5130 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5131 if (rc < 0) {
5132 LOGE("mDummyBatchChannel setBatchSize failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005136 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005137 if (rc < 0) {
5138 LOGE("mDummyBatchChannel initialization failed");
5139 pthread_mutex_unlock(&mMutex);
5140 goto error_exit;
5141 }
5142 }
5143
5144 // Set bundle info
5145 rc = setBundleInfo();
5146 if (rc < 0) {
5147 LOGE("setBundleInfo failed %d", rc);
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
5151
5152 //update settings from app here
5153 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5154 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5155 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5156 }
5157 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5158 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5159 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5160 }
5161 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5162 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5163 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5164
5165 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5166 (mLinkedCameraId != mCameraId) ) {
5167 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5168 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005169 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005170 goto error_exit;
5171 }
5172 }
5173
5174 // add bundle related cameras
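// When the app links two cameras, fill the dual-camera bundle command with this
// camera's role (main/aux and bayer/mono below) and the peer's session id, then
// send it to the backend so the two sessions stay synchronized.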
5175 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5176 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005177 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5178 &m_pDualCamCmdPtr->bundle_info;
5179 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005180 if (mIsDeviceLinked)
5181 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5182 else
5183 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5184
5185 pthread_mutex_lock(&gCamLock);
5186
5187 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5188 LOGE("Dualcam: Invalid Session Id ");
5189 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005190 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 goto error_exit;
5192 }
5193
5194 if (mIsMainCamera == 1) {
5195 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5196 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005197 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005198 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005199 // related session id should be session id of linked session
5200 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5201 } else {
5202 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5203 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005204 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005205 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005206 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5207 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005208 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 pthread_mutex_unlock(&gCamLock);
5210
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005211 rc = mCameraHandle->ops->set_dual_cam_cmd(
5212 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 if (rc < 0) {
5214 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005215 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 goto error_exit;
5217 }
5218 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005219 goto no_error;
5220error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005221 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005222 return rc;
5223no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 mWokenUpByDaemon = false;
5225 mPendingLiveRequest = 0;
5226 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 }
5228
5229 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005230 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231
5232 if (mFlushPerf) {
5233 //we cannot accept any requests during flush
5234 LOGE("process_capture_request cannot proceed during flush");
5235 pthread_mutex_unlock(&mMutex);
5236 return NO_ERROR; //should return an error
5237 }
5238
5239 if (meta.exists(ANDROID_REQUEST_ID)) {
5240 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5241 mCurrentRequestId = request_id;
5242 LOGD("Received request with id: %d", request_id);
5243 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5244 LOGE("Unable to find request id field, \
5245 & no previous id available");
5246 pthread_mutex_unlock(&mMutex);
5247 return NAME_NOT_FOUND;
5248 } else {
5249 LOGD("Re-using old request id");
5250 request_id = mCurrentRequestId;
5251 }
5252
5253 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5254 request->num_output_buffers,
5255 request->input_buffer,
5256 frameNumber);
5257 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005258 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005260 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 uint32_t snapshotStreamId = 0;
5262 for (size_t i = 0; i < request->num_output_buffers; i++) {
5263 const camera3_stream_buffer_t& output = request->output_buffers[i];
5264 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5265
Emilian Peev7650c122017-01-19 08:24:33 -08005266 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5267 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005268            //FIXME: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005269 blob_request = 1;
5270 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5271 }
5272
5273 if (output.acquire_fence != -1) {
5274 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5275 close(output.acquire_fence);
5276 if (rc != OK) {
5277 LOGE("sync wait failed %d", rc);
5278 pthread_mutex_unlock(&mMutex);
5279 return rc;
5280 }
5281 }
5282
Emilian Peev0f3c3162017-03-15 12:57:46 +00005283 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5284 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005285 depthRequestPresent = true;
5286 continue;
5287 }
5288
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005289 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005291
5292 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5293 isVidBufRequested = true;
5294 }
5295 }
5296
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005297    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5298 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5299 itr++) {
5300 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5301 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5302 channel->getStreamID(channel->getStreamTypeMask());
5303
5304 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5305 isVidBufRequested = true;
5306 }
5307 }
5308
Thierry Strudel3d639192016-09-09 11:52:26 -07005309 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005310 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005311 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 }
5313 if (blob_request && mRawDumpChannel) {
5314 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005315 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005316 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005317 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 }
5319
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005320 {
5321 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5322 // Request a RAW buffer if
5323 // 1. mHdrPlusRawSrcChannel is valid.
5324 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5325 // 3. There is no pending HDR+ request.
5326 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5327 mHdrPlusPendingRequests.size() == 0) {
5328 streamsArray.stream_request[streamsArray.num_streams].streamID =
5329 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5330 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5331 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005332 }
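    // Note: the RAW stream added above is folded into streamsArray, so it rides
    // along with the regular streams when CAM_INTF_META_STREAM_ID is programmed
    // into mParameters further below.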
5333
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005334 //extract capture intent
5335 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5336 mCaptureIntent =
5337 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5338 }
5339
5340 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5341 mCacMode =
5342 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5343 }
5344
5345 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005346 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005347
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005348 {
5349 Mutex::Autolock l(gHdrPlusClientLock);
5350 // If this request has a still capture intent, try to submit an HDR+ request.
5351 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5352 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5353 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5354 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005355 }
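    // From here on hdrPlusRequest selects the path: an HDR+ request only sets
    // the frame parameters and is queued into mHdrPlusPendingRequests below,
    // while a regular request is parsed and submitted to the backend channels.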
5356
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005357 if (hdrPlusRequest) {
5358 // For a HDR+ request, just set the frame parameters.
5359 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5360 if (rc < 0) {
5361 LOGE("fail to set frame parameters");
5362 pthread_mutex_unlock(&mMutex);
5363 return rc;
5364 }
5365 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 /* Parse the settings:
5367 * - For every request in NORMAL MODE
5368 * - For every request in HFR mode during preview only case
5369 * - For first request of every batch in HFR mode during video
 5370         *   recording. In batch mode the same settings, except for the frame
 5371         *   number, are repeated in each request of the batch.
5372 */
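        // For example (numbers are illustrative): with mBatchSize == 8 during HFR
        // video recording, the condition below calls setFrameParameters() only
        // when mToBeQueuedVidBufs == 0, i.e. once at the start of each batch of
        // eight video requests; normal mode and preview-only HFR hit it on every
        // request.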
5373 if (!mBatchSize ||
5374 (mBatchSize && !isVidBufRequested) ||
5375 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005376 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005377 if (rc < 0) {
5378 LOGE("fail to set frame parameters");
5379 pthread_mutex_unlock(&mMutex);
5380 return rc;
5381 }
5382 }
5383 /* For batchMode HFR, setFrameParameters is not called for every
5384 * request. But only frame number of the latest request is parsed.
5385 * Keep track of first and last frame numbers in a batch so that
5386 * metadata for the frame numbers of batch can be duplicated in
 5387          * handleBatchMetadata */
5388 if (mBatchSize) {
5389 if (!mToBeQueuedVidBufs) {
5390 //start of the batch
5391 mFirstFrameNumberInBatch = request->frame_number;
5392 }
5393 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5394 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5395 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005396 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 return BAD_VALUE;
5398 }
5399 }
5400 if (mNeedSensorRestart) {
5401 /* Unlock the mutex as restartSensor waits on the channels to be
5402 * stopped, which in turn calls stream callback functions -
5403 * handleBufferWithLock and handleMetadataWithLock */
5404 pthread_mutex_unlock(&mMutex);
5405 rc = dynamicUpdateMetaStreamInfo();
5406 if (rc != NO_ERROR) {
5407 LOGE("Restarting the sensor failed");
5408 return BAD_VALUE;
5409 }
5410 mNeedSensorRestart = false;
5411 pthread_mutex_lock(&mMutex);
5412 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005413 if(mResetInstantAEC) {
5414 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5415 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5416 mResetInstantAEC = false;
5417 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005418 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005419 if (request->input_buffer->acquire_fence != -1) {
5420 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5421 close(request->input_buffer->acquire_fence);
5422 if (rc != OK) {
5423 LOGE("input buffer sync wait failed %d", rc);
5424 pthread_mutex_unlock(&mMutex);
5425 return rc;
5426 }
5427 }
5428 }
5429
5430 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5431 mLastCustIntentFrmNum = frameNumber;
5432 }
5433 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005434 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 pendingRequestIterator latestRequest;
5436 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005437 pendingRequest.num_buffers = depthRequestPresent ?
5438 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005439 pendingRequest.request_id = request_id;
5440 pendingRequest.blob_request = blob_request;
5441 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005442 if (request->input_buffer) {
5443 pendingRequest.input_buffer =
5444 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5445 *(pendingRequest.input_buffer) = *(request->input_buffer);
5446 pInputBuffer = pendingRequest.input_buffer;
5447 } else {
5448 pendingRequest.input_buffer = NULL;
5449 pInputBuffer = NULL;
5450 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005451 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452
5453 pendingRequest.pipeline_depth = 0;
5454 pendingRequest.partial_result_cnt = 0;
5455 extractJpegMetadata(mCurJpegMeta, request);
5456 pendingRequest.jpegMetadata = mCurJpegMeta;
5457 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005458 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005459 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5460 mHybridAeEnable =
5461 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5462 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005463
5464 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5465 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005466 /* DevCamDebug metadata processCaptureRequest */
5467 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5468 mDevCamDebugMetaEnable =
5469 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5470 }
5471 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5472 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005473
5474 //extract CAC info
5475 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5476 mCacMode =
5477 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5478 }
5479 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005480 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005481
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005482 // extract enableZsl info
5483 if (gExposeEnableZslKey) {
5484 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5485 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5486 mZslEnabled = pendingRequest.enableZsl;
5487 } else {
5488 pendingRequest.enableZsl = mZslEnabled;
5489 }
5490 }
5491
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 PendingBuffersInRequest bufsForCurRequest;
5493 bufsForCurRequest.frame_number = frameNumber;
5494 // Mark current timestamp for the new request
5495 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005496 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005497
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005498 if (hdrPlusRequest) {
5499 // Save settings for this request.
5500 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5501 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5502
5503 // Add to pending HDR+ request queue.
5504 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5505 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5506
5507 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5508 }
5509
Thierry Strudel3d639192016-09-09 11:52:26 -07005510 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005511 if ((request->output_buffers[i].stream->data_space ==
5512 HAL_DATASPACE_DEPTH) &&
5513 (HAL_PIXEL_FORMAT_BLOB ==
5514 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005515 continue;
5516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005517 RequestedBufferInfo requestedBuf;
5518 memset(&requestedBuf, 0, sizeof(requestedBuf));
5519 requestedBuf.stream = request->output_buffers[i].stream;
5520 requestedBuf.buffer = NULL;
5521 pendingRequest.buffers.push_back(requestedBuf);
5522
5523 // Add to buffer handle the pending buffers list
5524 PendingBufferInfo bufferInfo;
5525 bufferInfo.buffer = request->output_buffers[i].buffer;
5526 bufferInfo.stream = request->output_buffers[i].stream;
5527 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5528 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5529 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5530 frameNumber, bufferInfo.buffer,
5531 channel->getStreamTypeMask(), bufferInfo.stream->format);
5532 }
5533 // Add this request packet into mPendingBuffersMap
5534 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5535 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5536 mPendingBuffersMap.get_num_overall_buffers());
5537
5538 latestRequest = mPendingRequestsList.insert(
5539 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005540
5541 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5542 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005543 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005544 for (size_t i = 0; i < request->num_output_buffers; i++) {
5545 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5546 }
5547
Thierry Strudel3d639192016-09-09 11:52:26 -07005548 if(mFlush) {
5549 LOGI("mFlush is true");
5550 pthread_mutex_unlock(&mMutex);
5551 return NO_ERROR;
5552 }
5553
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005554 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5555 // channel.
5556 if (!hdrPlusRequest) {
5557 int indexUsed;
5558 // Notify metadata channel we receive a request
5559 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005561 if(request->input_buffer != NULL){
5562 LOGD("Input request, frame_number %d", frameNumber);
5563 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5564 if (NO_ERROR != rc) {
5565 LOGE("fail to set reproc parameters");
5566 pthread_mutex_unlock(&mMutex);
5567 return rc;
5568 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005569 }
5570
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005571 // Call request on other streams
5572 uint32_t streams_need_metadata = 0;
5573 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5574 for (size_t i = 0; i < request->num_output_buffers; i++) {
5575 const camera3_stream_buffer_t& output = request->output_buffers[i];
5576 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5577
5578 if (channel == NULL) {
5579 LOGW("invalid channel pointer for stream");
5580 continue;
5581 }
5582
5583 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5584 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5585 output.buffer, request->input_buffer, frameNumber);
5586 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005587 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005588 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5589 if (rc < 0) {
5590 LOGE("Fail to request on picture channel");
5591 pthread_mutex_unlock(&mMutex);
5592 return rc;
5593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005594 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005595 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5596 assert(NULL != mDepthChannel);
5597 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005598
Emilian Peev7650c122017-01-19 08:24:33 -08005599 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5600 if (rc < 0) {
5601 LOGE("Fail to map on depth buffer");
5602 pthread_mutex_unlock(&mMutex);
5603 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005604 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005605 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005606 } else {
5607 LOGD("snapshot request with buffer %p, frame_number %d",
5608 output.buffer, frameNumber);
5609 if (!request->settings) {
5610 rc = channel->request(output.buffer, frameNumber,
5611 NULL, mPrevParameters, indexUsed);
5612 } else {
5613 rc = channel->request(output.buffer, frameNumber,
5614 NULL, mParameters, indexUsed);
5615 }
5616 if (rc < 0) {
5617 LOGE("Fail to request on picture channel");
5618 pthread_mutex_unlock(&mMutex);
5619 return rc;
5620 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005621
Emilian Peev7650c122017-01-19 08:24:33 -08005622 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5623 uint32_t j = 0;
5624 for (j = 0; j < streamsArray.num_streams; j++) {
5625 if (streamsArray.stream_request[j].streamID == streamId) {
5626 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5627 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5628 else
5629 streamsArray.stream_request[j].buf_index = indexUsed;
5630 break;
5631 }
5632 }
5633 if (j == streamsArray.num_streams) {
5634 LOGE("Did not find matching stream to update index");
5635 assert(0);
5636 }
5637
5638 pendingBufferIter->need_metadata = true;
5639 streams_need_metadata++;
5640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5643 bool needMetadata = false;
5644 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5645 rc = yuvChannel->request(output.buffer, frameNumber,
5646 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5647 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005648 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005649 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005650 pthread_mutex_unlock(&mMutex);
5651 return rc;
5652 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005653
5654 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5655 uint32_t j = 0;
5656 for (j = 0; j < streamsArray.num_streams; j++) {
5657 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5659 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5660 else
5661 streamsArray.stream_request[j].buf_index = indexUsed;
5662 break;
5663 }
5664 }
5665 if (j == streamsArray.num_streams) {
5666 LOGE("Did not find matching stream to update index");
5667 assert(0);
5668 }
5669
5670 pendingBufferIter->need_metadata = needMetadata;
5671 if (needMetadata)
5672 streams_need_metadata += 1;
5673 LOGD("calling YUV channel request, need_metadata is %d",
5674 needMetadata);
5675 } else {
5676 LOGD("request with buffer %p, frame_number %d",
5677 output.buffer, frameNumber);
5678
5679 rc = channel->request(output.buffer, frameNumber, indexUsed);
5680
5681 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5682 uint32_t j = 0;
5683 for (j = 0; j < streamsArray.num_streams; j++) {
5684 if (streamsArray.stream_request[j].streamID == streamId) {
5685 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5686 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5687 else
5688 streamsArray.stream_request[j].buf_index = indexUsed;
5689 break;
5690 }
5691 }
5692 if (j == streamsArray.num_streams) {
5693 LOGE("Did not find matching stream to update index");
5694 assert(0);
5695 }
5696
5697 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5698 && mBatchSize) {
5699 mToBeQueuedVidBufs++;
5700 if (mToBeQueuedVidBufs == mBatchSize) {
5701 channel->queueBatchBuf();
5702 }
5703 }
5704 if (rc < 0) {
5705 LOGE("request failed");
5706 pthread_mutex_unlock(&mMutex);
5707 return rc;
5708 }
5709 }
5710 pendingBufferIter++;
5711 }
5712
5713 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5714 itr++) {
5715 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5716
5717 if (channel == NULL) {
5718 LOGE("invalid channel pointer for stream");
5719 assert(0);
5720 return BAD_VALUE;
5721 }
5722
5723 InternalRequest requestedStream;
5724 requestedStream = (*itr);
5725
5726
5727 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5728 LOGD("snapshot request internally input buffer %p, frame_number %d",
5729 request->input_buffer, frameNumber);
5730 if(request->input_buffer != NULL){
5731 rc = channel->request(NULL, frameNumber,
5732 pInputBuffer, &mReprocMeta, indexUsed, true,
5733 requestedStream.meteringOnly);
5734 if (rc < 0) {
5735 LOGE("Fail to request on picture channel");
5736 pthread_mutex_unlock(&mMutex);
5737 return rc;
5738 }
5739 } else {
5740 LOGD("snapshot request with frame_number %d", frameNumber);
5741 if (!request->settings) {
5742 rc = channel->request(NULL, frameNumber,
5743 NULL, mPrevParameters, indexUsed, true,
5744 requestedStream.meteringOnly);
5745 } else {
5746 rc = channel->request(NULL, frameNumber,
5747 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5748 }
5749 if (rc < 0) {
5750 LOGE("Fail to request on picture channel");
5751 pthread_mutex_unlock(&mMutex);
5752 return rc;
5753 }
5754
5755 if ((*itr).meteringOnly != 1) {
5756 requestedStream.need_metadata = 1;
5757 streams_need_metadata++;
5758 }
5759 }
5760
5761 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5762 uint32_t j = 0;
5763 for (j = 0; j < streamsArray.num_streams; j++) {
5764 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005765 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5766 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5767 else
5768 streamsArray.stream_request[j].buf_index = indexUsed;
5769 break;
5770 }
5771 }
5772 if (j == streamsArray.num_streams) {
5773 LOGE("Did not find matching stream to update index");
5774 assert(0);
5775 }
5776
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005779 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005780 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005781 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005783 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005784
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005785 //If 2 streams have need_metadata set to true, fail the request, unless
5786 //we copy/reference count the metadata buffer
5787 if (streams_need_metadata > 1) {
 5788             LOGE("not supporting request in which two streams require"
5789 " 2 HAL metadata for reprocessing");
5790 pthread_mutex_unlock(&mMutex);
5791 return -EINVAL;
5792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005793
Emilian Peev656e4fa2017-06-02 16:47:04 +01005794 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5795 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5796 if (depthRequestPresent && mDepthChannel) {
5797 if (request->settings) {
5798 camera_metadata_ro_entry entry;
5799 if (find_camera_metadata_ro_entry(request->settings,
5800 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5801 if (entry.data.u8[0]) {
5802 pdafEnable = CAM_PD_DATA_ENABLED;
5803 } else {
5804 pdafEnable = CAM_PD_DATA_SKIP;
5805 }
5806 mDepthCloudMode = pdafEnable;
5807 } else {
5808 pdafEnable = mDepthCloudMode;
5809 }
5810 } else {
5811 pdafEnable = mDepthCloudMode;
5812 }
5813 }
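    // Summary of the selection above: with a depth channel configured, PD data
    // defaults to CAM_PD_DATA_SKIP; a depth request can switch it to
    // CAM_PD_DATA_ENABLED (or back to SKIP) via the
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting, and the last choice is
    // cached in mDepthCloudMode. Without a depth channel it stays
    // CAM_PD_DATA_DISABLED.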
5814
Emilian Peev7650c122017-01-19 08:24:33 -08005815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5816 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5817 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5818 pthread_mutex_unlock(&mMutex);
5819 return BAD_VALUE;
5820 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005822 if (request->input_buffer == NULL) {
5823 /* Set the parameters to backend:
5824 * - For every request in NORMAL MODE
5825 * - For every request in HFR mode during preview only case
5826 * - Once every batch in HFR mode during video recording
5827 */
5828 if (!mBatchSize ||
5829 (mBatchSize && !isVidBufRequested) ||
5830 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5831 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5832 mBatchSize, isVidBufRequested,
5833 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005834
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005835 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5836 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5837 uint32_t m = 0;
5838 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5839 if (streamsArray.stream_request[k].streamID ==
5840 mBatchedStreamsArray.stream_request[m].streamID)
5841 break;
5842 }
5843 if (m == mBatchedStreamsArray.num_streams) {
5844 mBatchedStreamsArray.stream_request\
5845 [mBatchedStreamsArray.num_streams].streamID =
5846 streamsArray.stream_request[k].streamID;
5847 mBatchedStreamsArray.stream_request\
5848 [mBatchedStreamsArray.num_streams].buf_index =
5849 streamsArray.stream_request[k].buf_index;
5850 mBatchedStreamsArray.num_streams =
5851 mBatchedStreamsArray.num_streams + 1;
5852 }
5853 }
5854 streamsArray = mBatchedStreamsArray;
5855 }
5856 /* Update stream id of all the requested buffers */
5857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5858 streamsArray)) {
5859 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005860 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005861 return BAD_VALUE;
5862 }
5863
5864 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5865 mParameters);
5866 if (rc < 0) {
5867 LOGE("set_parms failed");
5868 }
5869 /* reset to zero coz, the batch is queued */
5870 mToBeQueuedVidBufs = 0;
5871 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5872 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5873 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005874 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5875 uint32_t m = 0;
5876 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5877 if (streamsArray.stream_request[k].streamID ==
5878 mBatchedStreamsArray.stream_request[m].streamID)
5879 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005880 }
5881 if (m == mBatchedStreamsArray.num_streams) {
5882 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5883 streamID = streamsArray.stream_request[k].streamID;
5884 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5885 buf_index = streamsArray.stream_request[k].buf_index;
5886 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5887 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005888 }
5889 }
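        // In batch mode the per-request stream IDs are accumulated (de-duplicated)
        // in mBatchedStreamsArray, so the single set_parms issued for the batch
        // carries the union of every stream requested across the batch.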
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005890 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005891
5892 // Start all streams after the first setting is sent, so that the
5893 // setting can be applied sooner: (0 + apply_delay)th frame.
5894 if (mState == CONFIGURED && mChannelHandle) {
5895 //Then start them.
5896 LOGH("Start META Channel");
5897 rc = mMetadataChannel->start();
5898 if (rc < 0) {
5899 LOGE("META channel start failed");
5900 pthread_mutex_unlock(&mMutex);
5901 return rc;
5902 }
5903
5904 if (mAnalysisChannel) {
5905 rc = mAnalysisChannel->start();
5906 if (rc < 0) {
5907 LOGE("Analysis channel start failed");
5908 mMetadataChannel->stop();
5909 pthread_mutex_unlock(&mMutex);
5910 return rc;
5911 }
5912 }
5913
5914 if (mSupportChannel) {
5915 rc = mSupportChannel->start();
5916 if (rc < 0) {
5917 LOGE("Support channel start failed");
5918 mMetadataChannel->stop();
 5919                     /* Although support and analysis are mutually exclusive today,
 5920                        handle it here in any case for future proofing */
5921 if (mAnalysisChannel) {
5922 mAnalysisChannel->stop();
5923 }
5924 pthread_mutex_unlock(&mMutex);
5925 return rc;
5926 }
5927 }
5928 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5929 it != mStreamInfo.end(); it++) {
5930 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5931 LOGH("Start Processing Channel mask=%d",
5932 channel->getStreamTypeMask());
5933 rc = channel->start();
5934 if (rc < 0) {
5935 LOGE("channel start failed");
5936 pthread_mutex_unlock(&mMutex);
5937 return rc;
5938 }
5939 }
5940
5941 if (mRawDumpChannel) {
5942 LOGD("Starting raw dump stream");
5943 rc = mRawDumpChannel->start();
5944 if (rc != NO_ERROR) {
5945 LOGE("Error Starting Raw Dump Channel");
5946 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5947 it != mStreamInfo.end(); it++) {
5948 QCamera3Channel *channel =
5949 (QCamera3Channel *)(*it)->stream->priv;
5950 LOGH("Stopping Processing Channel mask=%d",
5951 channel->getStreamTypeMask());
5952 channel->stop();
5953 }
5954 if (mSupportChannel)
5955 mSupportChannel->stop();
5956 if (mAnalysisChannel) {
5957 mAnalysisChannel->stop();
5958 }
5959 mMetadataChannel->stop();
5960 pthread_mutex_unlock(&mMutex);
5961 return rc;
5962 }
5963 }
5964
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005965 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005966 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005967 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005968 if (rc != NO_ERROR) {
5969 LOGE("start_channel failed %d", rc);
5970 pthread_mutex_unlock(&mMutex);
5971 return rc;
5972 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005973
5974 {
5975 // Configure Easel for stream on.
5976 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005977
5978 // Now that sensor mode should have been selected, get the selected sensor mode
5979 // info.
5980 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5981 getCurrentSensorModeInfo(mSensorModeInfo);
5982
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005983 if (EaselManagerClientOpened) {
5984 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005985 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5986 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005987 if (rc != OK) {
5988 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5989 mCameraId, mSensorModeInfo.op_pixel_clk);
5990 pthread_mutex_unlock(&mMutex);
5991 return rc;
5992 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005993 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005994 }
5995 }
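            // Note: the channel was started above with sensor streaming deferred
            // so that, when Easel is present, the MIPI link can be brought up at
            // the selected sensor mode's op_pixel_clk before the sensor actually
            // starts streaming below.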
5996
5997 // Start sensor streaming.
5998 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5999 mChannelHandle);
6000 if (rc != NO_ERROR) {
6001 LOGE("start_sensor_stream_on failed %d", rc);
6002 pthread_mutex_unlock(&mMutex);
6003 return rc;
6004 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006006 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006007 }
6008
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006009 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006010 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006011 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006012 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006013 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6014 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6015 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6016 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6017 rc = enableHdrPlusModeLocked();
6018 if (rc != OK) {
6019 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023
6024 mFirstPreviewIntentSeen = true;
6025 }
6026 }
6027
Thierry Strudel3d639192016-09-09 11:52:26 -07006028 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6029
6030 mState = STARTED;
6031 // Added a timed condition wait
6032 struct timespec ts;
6033 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006034 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006035 if (rc < 0) {
6036 isValidTimeout = 0;
 6037        LOGE("Error reading the monotonic clock!!");
6038 }
6039 else {
 6040        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006041 int64_t timeout = 5;
6042 {
6043 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6044 // If there is a pending HDR+ request, the following requests may be blocked until the
6045 // HDR+ request is done. So allow a longer timeout.
6046 if (mHdrPlusPendingRequests.size() > 0) {
6047 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6048 }
6049 }
6050 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006051 }
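    // The wait below applies back-pressure on the framework: this call blocks
    // until the number of in-flight requests drops below mMinInFlightRequests
    // (requests carrying an input buffer are not throttled), or until the timed
    // wait above expires, in which case -ENODEV is returned.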
6052 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006053 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006054 (mState != ERROR) && (mState != DEINIT)) {
6055 if (!isValidTimeout) {
6056 LOGD("Blocking on conditional wait");
6057 pthread_cond_wait(&mRequestCond, &mMutex);
6058 }
6059 else {
6060 LOGD("Blocking on timed conditional wait");
6061 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6062 if (rc == ETIMEDOUT) {
6063 rc = -ENODEV;
6064 LOGE("Unblocked on timeout!!!!");
6065 break;
6066 }
6067 }
6068 LOGD("Unblocked");
6069 if (mWokenUpByDaemon) {
6070 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006071 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006072 break;
6073 }
6074 }
6075 pthread_mutex_unlock(&mMutex);
6076
6077 return rc;
6078}
6079
6080/*===========================================================================
6081 * FUNCTION : dump
6082 *
 6083 * DESCRIPTION: Dump HAL state (pending request list, pending buffer map and
 6084 *              pending frame drop list) to the given file descriptor
 6085 *
 6086 * PARAMETERS :
 6087 *   @fd : file descriptor to write the dump to
 6088 *
 6089 * RETURN     : None
6089 *==========================================================================*/
6090void QCamera3HardwareInterface::dump(int fd)
6091{
6092 pthread_mutex_lock(&mMutex);
6093 dprintf(fd, "\n Camera HAL3 information Begin \n");
6094
6095 dprintf(fd, "\nNumber of pending requests: %zu \n",
6096 mPendingRequestsList.size());
6097 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6098 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6099 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6100 for(pendingRequestIterator i = mPendingRequestsList.begin();
6101 i != mPendingRequestsList.end(); i++) {
6102 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6103 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6104 i->input_buffer);
6105 }
6106 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6107 mPendingBuffersMap.get_num_overall_buffers());
6108 dprintf(fd, "-------+------------------\n");
6109 dprintf(fd, " Frame | Stream type mask \n");
6110 dprintf(fd, "-------+------------------\n");
6111 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6112 for(auto &j : req.mPendingBufferList) {
6113 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6114 dprintf(fd, " %5d | %11d \n",
6115 req.frame_number, channel->getStreamTypeMask());
6116 }
6117 }
6118 dprintf(fd, "-------+------------------\n");
6119
6120 dprintf(fd, "\nPending frame drop list: %zu\n",
6121 mPendingFrameDropList.size());
6122 dprintf(fd, "-------+-----------\n");
6123 dprintf(fd, " Frame | Stream ID \n");
6124 dprintf(fd, "-------+-----------\n");
6125 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6126 i != mPendingFrameDropList.end(); i++) {
6127 dprintf(fd, " %5d | %9d \n",
6128 i->frame_number, i->stream_ID);
6129 }
6130 dprintf(fd, "-------+-----------\n");
6131
6132 dprintf(fd, "\n Camera HAL3 information End \n");
6133
6134 /* use dumpsys media.camera as trigger to send update debug level event */
6135 mUpdateDebugLevel = true;
6136 pthread_mutex_unlock(&mMutex);
6137 return;
6138}
6139
6140/*===========================================================================
6141 * FUNCTION : flush
6142 *
6143 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6144 * conditionally restarts channels
6145 *
6146 * PARAMETERS :
6147 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006148 * @ stopChannelImmediately: stop the channel immediately. This should be used
 6149 *                          when the device has encountered an error and MIPI
 6150 *                          may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006151 *
6152 * RETURN :
6153 * 0 on success
6154 * Error code on failure
6155 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006156int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006157{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006158 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 int32_t rc = NO_ERROR;
6160
6161 LOGD("Unblocking Process Capture Request");
6162 pthread_mutex_lock(&mMutex);
6163 mFlush = true;
6164 pthread_mutex_unlock(&mMutex);
6165
6166 rc = stopAllChannels();
6167 // unlink of dualcam
6168 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006169 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6170 &m_pDualCamCmdPtr->bundle_info;
6171 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006172 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6173 pthread_mutex_lock(&gCamLock);
6174
6175 if (mIsMainCamera == 1) {
6176 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6177 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006178 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 // related session id should be session id of linked session
6180 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6181 } else {
6182 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6183 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006184 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006185 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6186 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006187 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006188 pthread_mutex_unlock(&gCamLock);
6189
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006190 rc = mCameraHandle->ops->set_dual_cam_cmd(
6191 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006192 if (rc < 0) {
6193 LOGE("Dualcam: Unlink failed, but still proceed to close");
6194 }
6195 }
6196
6197 if (rc < 0) {
6198 LOGE("stopAllChannels failed");
6199 return rc;
6200 }
6201 if (mChannelHandle) {
6202 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006203 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006204 }
6205
6206 // Reset bundle info
6207 rc = setBundleInfo();
6208 if (rc < 0) {
6209 LOGE("setBundleInfo failed %d", rc);
6210 return rc;
6211 }
6212
6213 // Mutex Lock
6214 pthread_mutex_lock(&mMutex);
6215
6216 // Unblock process_capture_request
6217 mPendingLiveRequest = 0;
6218 pthread_cond_signal(&mRequestCond);
6219
6220 rc = notifyErrorForPendingRequests();
6221 if (rc < 0) {
6222 LOGE("notifyErrorForPendingRequests failed");
6223 pthread_mutex_unlock(&mMutex);
6224 return rc;
6225 }
6226
6227 mFlush = false;
6228
6229 // Start the Streams/Channels
6230 if (restartChannels) {
6231 rc = startAllChannels();
6232 if (rc < 0) {
6233 LOGE("startAllChannels failed");
6234 pthread_mutex_unlock(&mMutex);
6235 return rc;
6236 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006237 if (mChannelHandle) {
6238 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006239 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006240 if (rc < 0) {
6241 LOGE("start_channel failed");
6242 pthread_mutex_unlock(&mMutex);
6243 return rc;
6244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006245 }
6246 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 pthread_mutex_unlock(&mMutex);
6248
6249 return 0;
6250}
6251
6252/*===========================================================================
6253 * FUNCTION : flushPerf
6254 *
 6255 * DESCRIPTION: This is the performance-optimized version of flush: instead of
 6256 *              streaming off, it flushes the backend and waits for the pending
 6257 *              buffers to be returned
6257 *
6258 * PARAMETERS :
6259 *
6260 *
6261 * RETURN : 0 : success
6262 * -EINVAL: input is malformed (device is not valid)
6263 * -ENODEV: if the device has encountered a serious error
6264 *==========================================================================*/
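// Rough flow, as implemented below: send a flush command to the backend, wait
// (bounded by FLUSH_TIMEOUT) on mBuffersCond until every pending buffer has been
// returned, let each channel flush its own resources, then report errors for any
// remaining pending requests and unblock process_capture_request.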
6265int QCamera3HardwareInterface::flushPerf()
6266{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006267 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006268 int32_t rc = 0;
6269 struct timespec timeout;
6270 bool timed_wait = false;
6271
6272 pthread_mutex_lock(&mMutex);
6273 mFlushPerf = true;
6274 mPendingBuffersMap.numPendingBufsAtFlush =
6275 mPendingBuffersMap.get_num_overall_buffers();
6276 LOGD("Calling flush. Wait for %d buffers to return",
6277 mPendingBuffersMap.numPendingBufsAtFlush);
6278
6279 /* send the flush event to the backend */
6280 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6281 if (rc < 0) {
6282 LOGE("Error in flush: IOCTL failure");
6283 mFlushPerf = false;
6284 pthread_mutex_unlock(&mMutex);
6285 return -ENODEV;
6286 }
6287
6288 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6289 LOGD("No pending buffers in HAL, return flush");
6290 mFlushPerf = false;
6291 pthread_mutex_unlock(&mMutex);
6292 return rc;
6293 }
6294
6295 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006296 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006297 if (rc < 0) {
 6298        LOGE("Error reading the monotonic clock, cannot use timed wait");
6299 } else {
6300 timeout.tv_sec += FLUSH_TIMEOUT;
6301 timed_wait = true;
6302 }
6303
6304 //Block on conditional variable
6305 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6306 LOGD("Waiting on mBuffersCond");
6307 if (!timed_wait) {
6308 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6309 if (rc != 0) {
6310 LOGE("pthread_cond_wait failed due to rc = %s",
6311 strerror(rc));
6312 break;
6313 }
6314 } else {
6315 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6316 if (rc != 0) {
6317 LOGE("pthread_cond_timedwait failed due to rc = %s",
6318 strerror(rc));
6319 break;
6320 }
6321 }
6322 }
6323 if (rc != 0) {
6324 mFlushPerf = false;
6325 pthread_mutex_unlock(&mMutex);
6326 return -ENODEV;
6327 }
6328
6329 LOGD("Received buffers, now safe to return them");
6330
6331 //make sure the channels handle flush
6332 //currently only required for the picture channel to release snapshot resources
6333 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6334 it != mStreamInfo.end(); it++) {
6335 QCamera3Channel *channel = (*it)->channel;
6336 if (channel) {
6337 rc = channel->flush();
6338 if (rc) {
6339 LOGE("Flushing the channels failed with error %d", rc);
6340 // even though the channel flush failed we need to continue and
6341 // return the buffers we have to the framework, however the return
6342 // value will be an error
6343 rc = -ENODEV;
6344 }
6345 }
6346 }
6347
6348 /* notify the frameworks and send errored results */
6349 rc = notifyErrorForPendingRequests();
6350 if (rc < 0) {
6351 LOGE("notifyErrorForPendingRequests failed");
6352 pthread_mutex_unlock(&mMutex);
6353 return rc;
6354 }
6355
6356 //unblock process_capture_request
6357 mPendingLiveRequest = 0;
6358 unblockRequestIfNecessary();
6359
6360 mFlushPerf = false;
6361 pthread_mutex_unlock(&mMutex);
6362 LOGD ("Flush Operation complete. rc = %d", rc);
6363 return rc;
6364}
6365
6366/*===========================================================================
6367 * FUNCTION : handleCameraDeviceError
6368 *
 6369 * DESCRIPTION: This function performs an internal flush, notifies the framework
 6370 *              of the error and updates the state variable.
6371 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006372 * PARAMETERS :
6373 * @stopChannelImmediately : stop channels immediately without waiting for
6374 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006375 *
6376 * RETURN : NO_ERROR on Success
6377 * Error code on failure
6378 *==========================================================================*/
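// Flow: when the HAL is in the ERROR state, perform an internal flush without
// restarting channels, move the state to DEINIT, and finally notify the framework
// with a CAMERA3_MSG_ERROR_DEVICE message via orchestrateNotify().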
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006379int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006380{
6381 int32_t rc = NO_ERROR;
6382
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006383 {
6384 Mutex::Autolock lock(mFlushLock);
6385 pthread_mutex_lock(&mMutex);
6386 if (mState != ERROR) {
6387 //if mState != ERROR, nothing to be done
6388 pthread_mutex_unlock(&mMutex);
6389 return NO_ERROR;
6390 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006391 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006392
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006393 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006394 if (NO_ERROR != rc) {
6395 LOGE("internal flush to handle mState = ERROR failed");
6396 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006397
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006398 pthread_mutex_lock(&mMutex);
6399 mState = DEINIT;
6400 pthread_mutex_unlock(&mMutex);
6401 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006402
6403 camera3_notify_msg_t notify_msg;
6404 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6405 notify_msg.type = CAMERA3_MSG_ERROR;
6406 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6407 notify_msg.message.error.error_stream = NULL;
6408 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006409 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006410
6411 return rc;
6412}
6413
6414/*===========================================================================
6415 * FUNCTION : captureResultCb
6416 *
6417 * DESCRIPTION: Callback handler for all capture result
6418 * (streams, as well as metadata)
6419 *
6420 * PARAMETERS :
6421 * @metadata : metadata information
6422 * @buffer : actual gralloc buffer to be returned to frameworks.
 6423 *              NULL if metadata.
 6424 * @frame_number : frame number of the request the buffer belongs to
 6425 * @isInputBuffer : true if this callback is for an input buffer
 6426 *
6425 * RETURN : NONE
6426 *==========================================================================*/
6427void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6428 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6429{
6430 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006431 pthread_mutex_lock(&mMutex);
6432 uint8_t batchSize = mBatchSize;
6433 pthread_mutex_unlock(&mMutex);
6434 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006435 handleBatchMetadata(metadata_buf,
6436 true /* free_and_bufdone_meta_buf */);
6437 } else { /* mBatchSize = 0 */
6438 hdrPlusPerfLock(metadata_buf);
6439 pthread_mutex_lock(&mMutex);
6440 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006441 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006442 true /* last urgent frame of batch metadata */,
6443 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006444 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006445 pthread_mutex_unlock(&mMutex);
6446 }
6447 } else if (isInputBuffer) {
6448 pthread_mutex_lock(&mMutex);
6449 handleInputBufferWithLock(frame_number);
6450 pthread_mutex_unlock(&mMutex);
6451 } else {
6452 pthread_mutex_lock(&mMutex);
6453 handleBufferWithLock(buffer, frame_number);
6454 pthread_mutex_unlock(&mMutex);
6455 }
6456 return;
6457}
6458
6459/*===========================================================================
6460 * FUNCTION : getReprocessibleOutputStreamId
6461 *
6462 * DESCRIPTION: Get source output stream id for the input reprocess stream
6463 * based on size and format, which would be the largest
6464 * output stream if an input stream exists.
6465 *
6466 * PARAMETERS :
6467 * @id : return the stream id if found
6468 *
6469 * RETURN : int32_t type of status
6470 * NO_ERROR -- success
6471 * none-zero failure code
 6472 *              non-zero failure code
6473int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6474{
6475 /* check if any output or bidirectional stream with the same size and format
6476 and return that stream */
6477 if ((mInputStreamInfo.dim.width > 0) &&
6478 (mInputStreamInfo.dim.height > 0)) {
6479 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6480 it != mStreamInfo.end(); it++) {
6481
6482 camera3_stream_t *stream = (*it)->stream;
6483 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6484 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6485 (stream->format == mInputStreamInfo.format)) {
6486 // Usage flag for an input stream and the source output stream
6487 // may be different.
6488 LOGD("Found reprocessible output stream! %p", *it);
6489 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6490 stream->usage, mInputStreamInfo.usage);
6491
6492 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6493 if (channel != NULL && channel->mStreams[0]) {
6494 id = channel->mStreams[0]->getMyServerID();
6495 return NO_ERROR;
6496 }
6497 }
6498 }
6499 } else {
6500 LOGD("No input stream, so no reprocessible output stream");
6501 }
6502 return NAME_NOT_FOUND;
6503}
6504
6505/*===========================================================================
6506 * FUNCTION : lookupFwkName
6507 *
6508 * DESCRIPTION: In case the enum is not same in fwk and backend
 6509 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 6510 *              make sure the parameter is correctly propagated
6511 * PARAMETERS :
6512 * @arr : map between the two enums
6513 * @len : len of the map
6514 * @hal_name : name of the hal_parm to map
6515 *
6516 * RETURN : int type of status
6517 * fwk_name -- success
 6518 *              non-zero failure code
6519 *==========================================================================*/
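// Usage sketch (the table and macro names below are illustrative, not taken
// from this function):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffect);
//   if (fwkEffect != NAME_NOT_FOUND) {
//       // publish fwkEffect to the framework
//   }
// A NAME_NOT_FOUND return is expected whenever the backend value has no
// framework equivalent and should simply be skipped.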
6520template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6521 size_t len, halType hal_name)
6522{
6523
6524 for (size_t i = 0; i < len; i++) {
6525 if (arr[i].hal_name == hal_name) {
6526 return arr[i].fwk_name;
6527 }
6528 }
6529
 6530    /* Not being able to find a matching framework type is not necessarily
 6531     * an error. This happens when mm-camera supports more attributes
 6532     * than the framework does */
6533 LOGH("Cannot find matching framework type");
6534 return NAME_NOT_FOUND;
6535}
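// Example (comment-only sketch, using maps referenced elsewhere in this file):
// translating a HAL antibanding enum to its framework counterpart.
//     int val = lookupFwkName(ANTIBANDING_MODES_MAP,
//             METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), CAM_ANTIBANDING_MODE_AUTO);
//     if (val != NAME_NOT_FOUND) {
//         uint8_t fwk_ab_mode = (uint8_t)val;
//     }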
6536
6537/*===========================================================================
6538 * FUNCTION : lookupHalName
6539 *
6540 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6541 * make sure the parameter is correctly propagated
6542 *
6543 * PARAMETERS :
6544 * @arr : map between the two enums
6545 * @len : len of the map
6546 * @fwk_name : name of the fwk_parm to map
6547 *
6548 * RETURN : int32_t type of status
6549 * hal_name -- success
6550 * non-zero failure code
6551 *==========================================================================*/
6552template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6553 size_t len, fwkType fwk_name)
6554{
6555 for (size_t i = 0; i < len; i++) {
6556 if (arr[i].fwk_name == fwk_name) {
6557 return arr[i].hal_name;
6558 }
6559 }
6560
6561 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6562 return NAME_NOT_FOUND;
6563}
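// lookupHalName() is the inverse of lookupFwkName(); e.g. (illustrative only)
//     int hal = lookupHalName(ANTIBANDING_MODES_MAP,
//             METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_ab_mode);
// returns the CAM_ANTIBANDING_MODE_* value for a framework antibanding enum.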
6564
6565/*===========================================================================
6566 * FUNCTION : lookupProp
6567 *
6568 * DESCRIPTION: lookup a value by its name
6569 *
6570 * PARAMETERS :
6571 * @arr : map between the two enums
6572 * @len : size of the map
6573 * @name : name to be looked up
6574 *
6575 * RETURN : Value if found
6576 * CAM_CDS_MODE_MAX if not found
6577 *==========================================================================*/
6578template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6579 size_t len, const char *name)
6580{
6581 if (name) {
6582 for (size_t i = 0; i < len; i++) {
6583 if (!strcmp(arr[i].desc, name)) {
6584 return arr[i].val;
6585 }
6586 }
6587 }
6588 return CAM_CDS_MODE_MAX;
6589}
6590
6591/*===========================================================================
6592 * FUNCTION   : translateFromHalMetadata
6593 * DESCRIPTION: Translate HAL/backend metadata into the framework (camera_metadata_t) format
6594 *
6595 * PARAMETERS :
6596 * @metadata : metadata information from callback
6597 * @timestamp: metadata buffer timestamp
6598 * @request_id: request id
6599 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006600 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006601 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6602 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006604 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6605 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006606 *
6607 * RETURN : camera_metadata_t*
6608 * metadata in a format specified by fwk
6609 *==========================================================================*/
6610camera_metadata_t*
6611QCamera3HardwareInterface::translateFromHalMetadata(
6612 metadata_buffer_t *metadata,
6613 nsecs_t timestamp,
6614 int32_t request_id,
6615 const CameraMetadata& jpegMetadata,
6616 uint8_t pipeline_depth,
6617 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006618 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006619 /* DevCamDebug metadata translateFromHalMetadata argument */
6620 uint8_t DevCamDebug_meta_enable,
6621 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006622 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006623 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006624 bool lastMetadataInBatch,
6625 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006626{
6627 CameraMetadata camMetadata;
6628 camera_metadata_t *resultMetadata;
6629
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006630 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006631 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6632 * Timestamp is needed because it's used for shutter notify calculation.
6633 * */
6634 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6635 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006636 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006637 }
6638
Thierry Strudel3d639192016-09-09 11:52:26 -07006639 if (jpegMetadata.entryCount())
6640 camMetadata.append(jpegMetadata);
6641
6642 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6643 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6644 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6645 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006646 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006647 if (mBatchSize == 0) {
6648 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6649 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006651
Samuel Ha68ba5172016-12-15 18:41:12 -08006652 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6653 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6654 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6655 // DevCamDebug metadata translateFromHalMetadata AF
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6657 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6658 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6659 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6662 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6663 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6664 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6667 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6668 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6669 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6672 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6673 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6674 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6677 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6678 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6679 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6682 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6683 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6684 *DevCamDebug_af_monitor_pdaf_target_pos;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6686 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6691 *DevCamDebug_af_monitor_pdaf_confidence;
6692 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6693 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6696 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6697 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6698 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6699 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6702 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6703 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6704 *DevCamDebug_af_monitor_tof_target_pos;
6705 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6706 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6709 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6710 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6711 *DevCamDebug_af_monitor_tof_confidence;
6712 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6713 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6716 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6717 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6718 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6719 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6722 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6723 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6724 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6725 &fwk_DevCamDebug_af_monitor_type_select, 1);
6726 }
6727 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6728 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6729 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6730 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6731 &fwk_DevCamDebug_af_monitor_refocus, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6734 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6735 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6736 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6737 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6740 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6741 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6742 *DevCamDebug_af_search_pdaf_target_pos;
6743 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6744 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6745 }
6746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6747 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6748 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6749 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6750 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6751 }
6752 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6753 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6754 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6755 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6756 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6759 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6760 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6761 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6762 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6765 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6766 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6767 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6768 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6771 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6772 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6773 *DevCamDebug_af_search_tof_target_pos;
6774 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6775 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6776 }
6777 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6778 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6779 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6780 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6781 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6782 }
6783 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6784 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6785 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6786 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6787 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6788 }
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6790 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6791 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6793 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6796 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6797 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6798 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6799 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6802 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6803 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6804 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6805 &fwk_DevCamDebug_af_search_type_select, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6808 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6809 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6810 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6811 &fwk_DevCamDebug_af_search_next_pos, 1);
6812 }
6813 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6814 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6815 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6816 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6817 &fwk_DevCamDebug_af_search_target_pos, 1);
6818 }
6819 // DevCamDebug metadata translateFromHalMetadata AEC
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6821 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6822 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6823 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6826 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6827 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6828 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6831 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6832 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6833 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6836 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6837 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6838 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6841 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6842 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6843 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6844 }
6845 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6846 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6847 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6848 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6849 }
6850 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6851 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6852 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6853 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6854 }
6855 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6856 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6857 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6858 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6859 }
Samuel Ha34229982017-02-17 13:51:11 -08006860 // DevCamDebug metadata translateFromHalMetadata zzHDR
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6862 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6863 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6864 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6865 }
6866 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6867 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006868 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006869 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6870 }
6871 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6872 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6873 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6874 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6875 }
6876 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6877 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006878 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006879 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6882 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6883 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6884 *DevCamDebug_aec_hdr_sensitivity_ratio;
6885 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6886 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6887 }
6888 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6889 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6890 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6891 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6892 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6893 }
6894 // DevCamDebug metadata translateFromHalMetadata ADRC
6895 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6896 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6897 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6898 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6899 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6900 }
6901 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6902 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6903 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6904 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6905 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6906 }
6907 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6908 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6909 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6910 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6911 }
6912 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6913 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6914 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6915 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6916 }
6917 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6918 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6919 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6920 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6921 }
6922 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6923 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6924 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6925 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6926 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006927 // DevCamDebug metadata translateFromHalMetadata AWB
6928 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6929 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6930 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6931 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6932 }
6933 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6934 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6935 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6936 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6937 }
6938 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6939 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6940 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6941 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6944 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6945 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6946 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6947 }
6948 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6949 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6950 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6951 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6952 }
6953 }
6954 // atrace_end(ATRACE_TAG_ALWAYS);
6955
Thierry Strudel3d639192016-09-09 11:52:26 -07006956 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6957 int64_t fwk_frame_number = *frame_number;
6958 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6959 }
6960
6961 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6962 int32_t fps_range[2];
6963 fps_range[0] = (int32_t)float_range->min_fps;
6964 fps_range[1] = (int32_t)float_range->max_fps;
6965 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6966 fps_range, 2);
6967 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6968 fps_range[0], fps_range[1]);
6969 }
6970
6971 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6972 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6973 }
6974
6975 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6976 int val = lookupFwkName(SCENE_MODES_MAP,
6977 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6978 *sceneMode);
6979 if (NAME_NOT_FOUND != val) {
6980 uint8_t fwkSceneMode = (uint8_t)val;
6981 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6982 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6983 fwkSceneMode);
6984 }
6985 }
6986
6987 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6988 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6989 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6990 }
6991
6992 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6993 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6994 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6995 }
6996
6997 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6998 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6999 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7000 }
7001
7002 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7003 CAM_INTF_META_EDGE_MODE, metadata) {
7004 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7005 }
7006
7007 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7008 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7009 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7010 }
7011
7012 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7013 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7014 }
7015
7016 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7017 if (0 <= *flashState) {
7018 uint8_t fwk_flashState = (uint8_t) *flashState;
7019 if (!gCamCapability[mCameraId]->flash_available) {
7020 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7021 }
7022 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7023 }
7024 }
7025
7026 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7027 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7028 if (NAME_NOT_FOUND != val) {
7029 uint8_t fwk_flashMode = (uint8_t)val;
7030 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7031 }
7032 }
7033
7034 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7035 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7036 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7037 }
7038
7039 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7040 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7041 }
7042
7043 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7044 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7045 }
7046
7047 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7048 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7049 }
7050
7051 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7052 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7053 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7054 }
7055
7056 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7057 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7058 LOGD("fwk_videoStab = %d", fwk_videoStab);
7059 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7060 } else {
7061 // Regardless of whether video stabilization is supported, CTS expects the EIS result
7062 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7063 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7064 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007065 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007066 }
7067
7068 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7069 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7070 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7071 }
7072
7073 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7074 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7075 }
7076
Thierry Strudel3d639192016-09-09 11:52:26 -07007077 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7078 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007079 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007080
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007081 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7082 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007083
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007084 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007085 blackLevelAppliedPattern->cam_black_level[0],
7086 blackLevelAppliedPattern->cam_black_level[1],
7087 blackLevelAppliedPattern->cam_black_level[2],
7088 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007089 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7090 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007091
7092#ifndef USE_HAL_3_3
7093 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307094 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007095 // depth space.
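        // Worked example (illustrative): an applied black level of 1024 in the
        // internal 14-bit space is reported as 1024 / 16 = 64 in the 10-bit space.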
Jason Lee4f3d96e2017-02-28 19:24:14 +05307096 fwk_blackLevelInd[0] /= 16.0;
7097 fwk_blackLevelInd[1] /= 16.0;
7098 fwk_blackLevelInd[2] /= 16.0;
7099 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007100 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7101 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007102#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007103 }
7104
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007105#ifndef USE_HAL_3_3
7106 // Fixed whitelevel is used by ISP/Sensor
7107 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7108 &gCamCapability[mCameraId]->white_level, 1);
7109#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007110
7111 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7112 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7113 int32_t scalerCropRegion[4];
7114 scalerCropRegion[0] = hScalerCropRegion->left;
7115 scalerCropRegion[1] = hScalerCropRegion->top;
7116 scalerCropRegion[2] = hScalerCropRegion->width;
7117 scalerCropRegion[3] = hScalerCropRegion->height;
7118
7119 // Adjust crop region from sensor output coordinate system to active
7120 // array coordinate system.
7121 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7122 scalerCropRegion[2], scalerCropRegion[3]);
7123
7124 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7125 }
7126
7127 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7128 LOGD("sensorExpTime = %lld", *sensorExpTime);
7129 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7130 }
7131
7132 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7133 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7134 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7135 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7136 }
7137
7138 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7139 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7140 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7141 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7142 sensorRollingShutterSkew, 1);
7143 }
7144
7145 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7146 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7147 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7148
7149 //calculate the noise profile based on sensitivity
7150 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7151 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7152 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
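        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel,
        // interleaved as [S0, O0, S1, O1, ...]; the same pair is reused for every
        // channel here because the model depends only on sensitivity.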
7153 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7154 noise_profile[i] = noise_profile_S;
7155 noise_profile[i+1] = noise_profile_O;
7156 }
7157 LOGD("noise model entry (S, O) is (%f, %f)",
7158 noise_profile_S, noise_profile_O);
7159 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7160 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7161 }
7162
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007163#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007164 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007165 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007166 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007167 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007168 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7169 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7170 }
7171 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007172#endif
7173
Thierry Strudel3d639192016-09-09 11:52:26 -07007174 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7175 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7176 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7177 }
7178
7179 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7180 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7181 *faceDetectMode);
7182 if (NAME_NOT_FOUND != val) {
7183 uint8_t fwk_faceDetectMode = (uint8_t)val;
7184 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7185
7186 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7187 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7188 CAM_INTF_META_FACE_DETECTION, metadata) {
7189 uint8_t numFaces = MIN(
7190 faceDetectionInfo->num_faces_detected, MAX_ROI);
7191 int32_t faceIds[MAX_ROI];
7192 uint8_t faceScores[MAX_ROI];
7193 int32_t faceRectangles[MAX_ROI * 4];
7194 int32_t faceLandmarks[MAX_ROI * 6];
7195 size_t j = 0, k = 0;
7196
7197 for (size_t i = 0; i < numFaces; i++) {
7198 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7199 // Adjust crop region from sensor output coordinate system to active
7200 // array coordinate system.
7201 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7202 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7203 rect.width, rect.height);
7204
7205 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7206 faceRectangles+j, -1);
7207
Jason Lee8ce36fa2017-04-19 19:40:37 -07007208 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7209 "bottom-right (%d, %d)",
7210 faceDetectionInfo->frame_id, i,
7211 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7212 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7213
Thierry Strudel3d639192016-09-09 11:52:26 -07007214 j+= 4;
7215 }
7216 if (numFaces <= 0) {
7217 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7218 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7219 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7220 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7221 }
7222
7223 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7224 numFaces);
7225 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7226 faceRectangles, numFaces * 4U);
7227 if (fwk_faceDetectMode ==
7228 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7229 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7230 CAM_INTF_META_FACE_LANDMARK, metadata) {
7231
7232 for (size_t i = 0; i < numFaces; i++) {
7233 // Map the co-ordinate sensor output coordinate system to active
7234 // array coordinate system.
7235 mCropRegionMapper.toActiveArray(
7236 landmarks->face_landmarks[i].left_eye_center.x,
7237 landmarks->face_landmarks[i].left_eye_center.y);
7238 mCropRegionMapper.toActiveArray(
7239 landmarks->face_landmarks[i].right_eye_center.x,
7240 landmarks->face_landmarks[i].right_eye_center.y);
7241 mCropRegionMapper.toActiveArray(
7242 landmarks->face_landmarks[i].mouth_center.x,
7243 landmarks->face_landmarks[i].mouth_center.y);
7244
7245 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007246
7247 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7248 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7249 faceDetectionInfo->frame_id, i,
7250 faceLandmarks[k + LEFT_EYE_X],
7251 faceLandmarks[k + LEFT_EYE_Y],
7252 faceLandmarks[k + RIGHT_EYE_X],
7253 faceLandmarks[k + RIGHT_EYE_Y],
7254 faceLandmarks[k + MOUTH_X],
7255 faceLandmarks[k + MOUTH_Y]);
7256
Thierry Strudel04e026f2016-10-10 11:27:36 -07007257 k+= TOTAL_LANDMARK_INDICES;
7258 }
7259 } else {
7260 for (size_t i = 0; i < numFaces; i++) {
7261 setInvalidLandmarks(faceLandmarks+k);
7262 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007263 }
7264 }
7265
Jason Lee49619db2017-04-13 12:07:22 -07007266 for (size_t i = 0; i < numFaces; i++) {
7267 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7268
7269 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7270 faceDetectionInfo->frame_id, i, faceIds[i]);
7271 }
7272
Thierry Strudel3d639192016-09-09 11:52:26 -07007273 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7274 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7275 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007276 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007277 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7278 CAM_INTF_META_FACE_BLINK, metadata) {
7279 uint8_t detected[MAX_ROI];
7280 uint8_t degree[MAX_ROI * 2];
7281 for (size_t i = 0; i < numFaces; i++) {
7282 detected[i] = blinks->blink[i].blink_detected;
7283 degree[2 * i] = blinks->blink[i].left_blink;
7284 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007285
Jason Lee49619db2017-04-13 12:07:22 -07007286 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7287 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7288 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7289 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007290 }
7291 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7292 detected, numFaces);
7293 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7294 degree, numFaces * 2);
7295 }
7296 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7297 CAM_INTF_META_FACE_SMILE, metadata) {
7298 uint8_t degree[MAX_ROI];
7299 uint8_t confidence[MAX_ROI];
7300 for (size_t i = 0; i < numFaces; i++) {
7301 degree[i] = smiles->smile[i].smile_degree;
7302 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007303
Jason Lee49619db2017-04-13 12:07:22 -07007304 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7305 "smile_degree=%d, smile_score=%d",
7306 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007307 }
7308 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7309 degree, numFaces);
7310 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7311 confidence, numFaces);
7312 }
7313 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7314 CAM_INTF_META_FACE_GAZE, metadata) {
7315 int8_t angle[MAX_ROI];
7316 int32_t direction[MAX_ROI * 3];
7317 int8_t degree[MAX_ROI * 2];
7318 for (size_t i = 0; i < numFaces; i++) {
7319 angle[i] = gazes->gaze[i].gaze_angle;
7320 direction[3 * i] = gazes->gaze[i].updown_dir;
7321 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7322 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7323 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7324 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007325
7326 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7327 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7328 "left_right_gaze=%d, top_bottom_gaze=%d",
7329 faceDetectionInfo->frame_id, i, angle[i],
7330 direction[3 * i], direction[3 * i + 1],
7331 direction[3 * i + 2],
7332 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007333 }
7334 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7335 (uint8_t *)angle, numFaces);
7336 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7337 direction, numFaces * 3);
7338 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7339 (uint8_t *)degree, numFaces * 2);
7340 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007341 }
7342 }
7343 }
7344 }
7345
7346 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7347 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007348 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007349 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007351
Shuzhen Wang14415f52016-11-16 18:26:18 -08007352 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7353 histogramBins = *histBins;
7354 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7355 }
7356
7357 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007358 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7359 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007360 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361
7362 switch (stats_data->type) {
7363 case CAM_HISTOGRAM_TYPE_BAYER:
7364 switch (stats_data->bayer_stats.data_type) {
7365 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007366 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7367 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007368 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007369 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7370 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007371 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007372 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7373 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007374 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007375 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007376 case CAM_STATS_CHANNEL_R:
7377 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007378 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7379 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007380 }
7381 break;
7382 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007383 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007384 break;
7385 }
7386
Shuzhen Wang14415f52016-11-16 18:26:18 -08007387 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007388 }
7389 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007390 }
7391
7392 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7393 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7394 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7395 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7396 }
7397
7398 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7399 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7400 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7401 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7402 }
7403
7404 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7405 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7406 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7407 CAM_MAX_SHADING_MAP_HEIGHT);
7408 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7409 CAM_MAX_SHADING_MAP_WIDTH);
7410 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7411 lensShadingMap->lens_shading, 4U * map_width * map_height);
7412 }
7413
7414 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7415 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7416 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7417 }
7418
7419 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7420 //Populate CAM_INTF_META_TONEMAP_CURVES
7421 /* ch0 = G, ch 1 = B, ch 2 = R*/
7422 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7423 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7424 tonemap->tonemap_points_cnt,
7425 CAM_MAX_TONEMAP_CURVE_SIZE);
7426 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7427 }
7428
7429 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7430 &tonemap->curves[0].tonemap_points[0][0],
7431 tonemap->tonemap_points_cnt * 2);
7432
7433 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7434 &tonemap->curves[1].tonemap_points[0][0],
7435 tonemap->tonemap_points_cnt * 2);
7436
7437 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7438 &tonemap->curves[2].tonemap_points[0][0],
7439 tonemap->tonemap_points_cnt * 2);
7440 }
7441
7442 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7443 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7444 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7445 CC_GAIN_MAX);
7446 }
7447
7448 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7449 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7450 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7451 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7452 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7453 }
7454
7455 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7456 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7457 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7458 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7459 toneCurve->tonemap_points_cnt,
7460 CAM_MAX_TONEMAP_CURVE_SIZE);
7461 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7462 }
7463 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7464 (float*)toneCurve->curve.tonemap_points,
7465 toneCurve->tonemap_points_cnt * 2);
7466 }
7467
7468 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7469 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7470 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7471 predColorCorrectionGains->gains, 4);
7472 }
7473
7474 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7475 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7476 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7477 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7478 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7479 }
7480
7481 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7482 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7483 }
7484
7485 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7486 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7487 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7488 }
7489
7490 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7491 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7492 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7493 }
7494
7495 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7496 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7497 *effectMode);
7498 if (NAME_NOT_FOUND != val) {
7499 uint8_t fwk_effectMode = (uint8_t)val;
7500 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7501 }
7502 }
7503
7504 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7505 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7506 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7507 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7508 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7509 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7510 }
7511 int32_t fwk_testPatternData[4];
7512 fwk_testPatternData[0] = testPatternData->r;
7513 fwk_testPatternData[3] = testPatternData->b;
7514 switch (gCamCapability[mCameraId]->color_arrangement) {
7515 case CAM_FILTER_ARRANGEMENT_RGGB:
7516 case CAM_FILTER_ARRANGEMENT_GRBG:
7517 fwk_testPatternData[1] = testPatternData->gr;
7518 fwk_testPatternData[2] = testPatternData->gb;
7519 break;
7520 case CAM_FILTER_ARRANGEMENT_GBRG:
7521 case CAM_FILTER_ARRANGEMENT_BGGR:
7522 fwk_testPatternData[2] = testPatternData->gr;
7523 fwk_testPatternData[1] = testPatternData->gb;
7524 break;
7525 default:
7526 LOGE("color arrangement %d is not supported",
7527 gCamCapability[mCameraId]->color_arrangement);
7528 break;
7529 }
7530 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7531 }
7532
7533 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7534 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7535 }
7536
7537 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7538 String8 str((const char *)gps_methods);
7539 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7540 }
7541
7542 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7543 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7544 }
7545
7546 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7547 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7548 }
7549
7550 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7551 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7552 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7553 }
7554
7555 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7556 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7557 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7558 }
7559
7560 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7561 int32_t fwk_thumb_size[2];
7562 fwk_thumb_size[0] = thumb_size->width;
7563 fwk_thumb_size[1] = thumb_size->height;
7564 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7565 }
7566
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007567 // Skip reprocess metadata if there is no input stream.
7568 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7569 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7570 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7571 privateData,
7572 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007574 }
7575
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007576 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007577 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007578 meteringMode, 1);
7579 }
7580
Thierry Strudel54dc9782017-02-15 12:12:10 -08007581 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7582 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7583 LOGD("hdr_scene_data: %d %f\n",
7584 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7585 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7586 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7587 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7588 &isHdr, 1);
7589 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7590 &isHdrConfidence, 1);
7591 }
7592
7593
7594
Thierry Strudel3d639192016-09-09 11:52:26 -07007595 if (metadata->is_tuning_params_valid) {
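        // Blob layout (as serialized below): tuning data version, then the sensor,
        // VFE, CPP, CAC and mod3 section sizes (mod3 forced to 0), followed by the
        // sensor/VFE/CPP/CAC payloads, each clamped to its TUNING_*_DATA_MAX size.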
7596 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7597 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7598 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7599
7600
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7602 sizeof(uint32_t));
7603 data += sizeof(uint32_t);
7604
7605 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7606 sizeof(uint32_t));
7607 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7608 data += sizeof(uint32_t);
7609
7610 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7611 sizeof(uint32_t));
7612 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7613 data += sizeof(uint32_t);
7614
7615 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7616 sizeof(uint32_t));
7617 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7618 data += sizeof(uint32_t);
7619
7620 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7621 sizeof(uint32_t));
7622 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7623 data += sizeof(uint32_t);
7624
7625 metadata->tuning_params.tuning_mod3_data_size = 0;
7626 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7627 sizeof(uint32_t));
7628 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7629 data += sizeof(uint32_t);
7630
7631 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7632 TUNING_SENSOR_DATA_MAX);
7633 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7634 count);
7635 data += count;
7636
7637 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7638 TUNING_VFE_DATA_MAX);
7639 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7640 count);
7641 data += count;
7642
7643 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7644 TUNING_CPP_DATA_MAX);
7645 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7646 count);
7647 data += count;
7648
7649 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7650 TUNING_CAC_DATA_MAX);
7651 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7652 count);
7653 data += count;
7654
7655 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7656 (int32_t *)(void *)tuning_meta_data_blob,
7657 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7658 }
7659
7660 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7661 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7662 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7663 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7664 NEUTRAL_COL_POINTS);
7665 }
7666
7667 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7668 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7669 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7670 }
7671
7672 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7673 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7674 // Adjust crop region from sensor output coordinate system to active
7675 // array coordinate system.
7676 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7677 hAeRegions->rect.width, hAeRegions->rect.height);
7678
7679 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7680 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7681 REGIONS_TUPLE_COUNT);
7682 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7683 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7684 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7685 hAeRegions->rect.height);
7686 }
7687
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007688 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7689 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7690 if (NAME_NOT_FOUND != val) {
7691 uint8_t fwkAfMode = (uint8_t)val;
7692 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7693 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7694 } else {
7695 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7696 val);
7697 }
7698 }
7699
Thierry Strudel3d639192016-09-09 11:52:26 -07007700 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7701 uint8_t fwk_afState = (uint8_t) *afState;
7702 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007703 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007704 }
7705
7706 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7707 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7708 }
7709
7710 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7711 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7712 }
7713
7714 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7715 uint8_t fwk_lensState = *lensState;
7716 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7717 }
7718
Thierry Strudel3d639192016-09-09 11:52:26 -07007719
7720 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007721 uint32_t ab_mode = *hal_ab_mode;
7722 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7723 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7724 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7725 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007726 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007727 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007728 if (NAME_NOT_FOUND != val) {
7729 uint8_t fwk_ab_mode = (uint8_t)val;
7730 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7731 }
7732 }
7733
7734 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7735 int val = lookupFwkName(SCENE_MODES_MAP,
7736 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7737 if (NAME_NOT_FOUND != val) {
7738 uint8_t fwkBestshotMode = (uint8_t)val;
7739 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7740 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7741 } else {
7742 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7743 }
7744 }
7745
7746 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7747 uint8_t fwk_mode = (uint8_t) *mode;
7748 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7749 }
7750
7751    /* Constant metadata values to be updated */
7752 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7753 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7754
7755 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7756 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7757
7758 int32_t hotPixelMap[2];
7759 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7760
7761 // CDS
7762 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7763 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7764 }
7765
Thierry Strudel04e026f2016-10-10 11:27:36 -07007766 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7767 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007768 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007769 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7770 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7771 } else {
7772 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7773 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007774
7775 if(fwk_hdr != curr_hdr_state) {
7776 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7777 if(fwk_hdr)
7778 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7779 else
7780 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7781 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007782 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7783 }
7784
Thierry Strudel54dc9782017-02-15 12:12:10 -08007785 //binning correction
7786 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7787 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7788 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7789 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7790 }
7791
Thierry Strudel04e026f2016-10-10 11:27:36 -07007792 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007793 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007794 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7795 int8_t is_ir_on = 0;
7796
7797 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7798 if(is_ir_on != curr_ir_state) {
7799 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7800 if(is_ir_on)
7801 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7802 else
7803 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7804 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007805 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007806 }
7807
Thierry Strudel269c81a2016-10-12 12:13:59 -07007808 // AEC SPEED
7809 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7810 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7811 }
7812
7813 // AWB SPEED
7814 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7815 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7816 }
7817
Thierry Strudel3d639192016-09-09 11:52:26 -07007818 // TNR
7819 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7820 uint8_t tnr_enable = tnr->denoise_enable;
7821 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007822 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7823 int8_t is_tnr_on = 0;
7824
7825 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7826 if(is_tnr_on != curr_tnr_state) {
7827 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7828 if(is_tnr_on)
7829 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7830 else
7831 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007833
7834 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7835 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7836 }
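    // The VHDR, IR and TNR blocks above all toggle a bit in mCurrFeatureState
    // with the same compare-and-log pattern. A hypothetical helper that would
    // capture it (shown only to make the pattern explicit, not part of this
    // HAL):
    //     void updateFeatureBit(uint32_t bit, bool on, const char *name) {
    //         bool curr = (mCurrFeatureState & bit) != 0;
    //         if (on != curr) {
    //             LOGH("PROFILE_META_%s_TOGGLED value=%d", name, on ? 1 : 0);
    //             if (on) mCurrFeatureState |= bit;
    //             else    mCurrFeatureState &= ~bit;
    //         }
    //     }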
7837
7838 // Reprocess crop data
7839 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7840 uint8_t cnt = crop_data->num_of_streams;
7841 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7842            // mm-qcamera-daemon only posts crop_data for streams that are
7843            // not linked to pproc, so the absence of valid crop metadata is
7844            // not necessarily an error.
7845 LOGD("No valid crop metadata entries");
7846 } else {
7847 uint32_t reproc_stream_id;
7848 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7849 LOGD("No reprocessible stream found, ignore crop data");
7850 } else {
7851 int rc = NO_ERROR;
7852 Vector<int32_t> roi_map;
7853 int32_t *crop = new int32_t[cnt*4];
7854 if (NULL == crop) {
7855 rc = NO_MEMORY;
7856 }
7857 if (NO_ERROR == rc) {
7858 int32_t streams_found = 0;
7859 for (size_t i = 0; i < cnt; i++) {
7860 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7861 if (pprocDone) {
7862 // HAL already does internal reprocessing,
7863 // either via reprocessing before JPEG encoding,
7864 // or offline postprocessing for pproc bypass case.
7865 crop[0] = 0;
7866 crop[1] = 0;
7867 crop[2] = mInputStreamInfo.dim.width;
7868 crop[3] = mInputStreamInfo.dim.height;
7869 } else {
7870 crop[0] = crop_data->crop_info[i].crop.left;
7871 crop[1] = crop_data->crop_info[i].crop.top;
7872 crop[2] = crop_data->crop_info[i].crop.width;
7873 crop[3] = crop_data->crop_info[i].crop.height;
7874 }
7875 roi_map.add(crop_data->crop_info[i].roi_map.left);
7876 roi_map.add(crop_data->crop_info[i].roi_map.top);
7877 roi_map.add(crop_data->crop_info[i].roi_map.width);
7878 roi_map.add(crop_data->crop_info[i].roi_map.height);
7879 streams_found++;
7880 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7881 crop[0], crop[1], crop[2], crop[3]);
7882 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7883 crop_data->crop_info[i].roi_map.left,
7884 crop_data->crop_info[i].roi_map.top,
7885 crop_data->crop_info[i].roi_map.width,
7886 crop_data->crop_info[i].roi_map.height);
7887 break;
7888
7889 }
7890 }
7891 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7892 &streams_found, 1);
7893 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7894 crop, (size_t)(streams_found * 4));
7895 if (roi_map.array()) {
7896 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7897 roi_map.array(), roi_map.size());
7898 }
7899 }
7900 if (crop) {
7901 delete [] crop;
7902 }
7903 }
7904 }
7905 }
7906
7907 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7908        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7909        // non-NULL, so hardcode the CAC result to OFF mode.
7910 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7911 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7912 } else {
7913 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7914 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7915 *cacMode);
7916 if (NAME_NOT_FOUND != val) {
7917 uint8_t resultCacMode = (uint8_t)val;
7918                // Check whether the CAC result from the CB matches the framework-set CAC mode.
7919                // If not, report the CAC mode that came in the corresponding request.
7920 if (fwk_cacMode != resultCacMode) {
7921 resultCacMode = fwk_cacMode;
7922 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007923 //Check if CAC is disabled by property
7924 if (m_cacModeDisabled) {
7925 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7926 }
7927
Thierry Strudel3d639192016-09-09 11:52:26 -07007928 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7929 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7930 } else {
7931 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7932 }
7933 }
7934 }
7935
7936 // Post blob of cam_cds_data through vendor tag.
7937 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7938 uint8_t cnt = cdsInfo->num_of_streams;
7939 cam_cds_data_t cdsDataOverride;
7940 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7941 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7942 cdsDataOverride.num_of_streams = 1;
7943 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7944 uint32_t reproc_stream_id;
7945 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7946 LOGD("No reprocessible stream found, ignore cds data");
7947 } else {
7948 for (size_t i = 0; i < cnt; i++) {
7949 if (cdsInfo->cds_info[i].stream_id ==
7950 reproc_stream_id) {
7951 cdsDataOverride.cds_info[0].cds_enable =
7952 cdsInfo->cds_info[i].cds_enable;
7953 break;
7954 }
7955 }
7956 }
7957 } else {
7958 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7959 }
7960 camMetadata.update(QCAMERA3_CDS_INFO,
7961 (uint8_t *)&cdsDataOverride,
7962 sizeof(cam_cds_data_t));
7963 }
7964
7965 // Ldaf calibration data
7966 if (!mLdafCalibExist) {
7967 IF_META_AVAILABLE(uint32_t, ldafCalib,
7968 CAM_INTF_META_LDAF_EXIF, metadata) {
7969 mLdafCalibExist = true;
7970 mLdafCalib[0] = ldafCalib[0];
7971 mLdafCalib[1] = ldafCalib[1];
7972 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7973 ldafCalib[0], ldafCalib[1]);
7974 }
7975 }
7976
Thierry Strudel54dc9782017-02-15 12:12:10 -08007977 // EXIF debug data through vendor tag
7978 /*
7979 * Mobicat Mask can assume 3 values:
7980 * 1 refers to Mobicat data,
7981 * 2 refers to Stats Debug and Exif Debug Data
7982 * 3 refers to Mobicat and Stats Debug Data
7983 * We want to make sure that we are sending Exif debug data
7984 * only when Mobicat Mask is 2.
7985 */
7986 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7987 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7988 (uint8_t *)(void *)mExifParams.debug_params,
7989 sizeof(mm_jpeg_debug_exif_params_t));
7990 }
7991
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007992 // Reprocess and DDM debug data through vendor tag
7993 cam_reprocess_info_t repro_info;
7994 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7996 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007997 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007998 }
7999 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8000 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008001 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008002 }
8003 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8004 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008005 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 }
8007 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8008 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008009 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008010 }
8011 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8012 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008013 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 }
8015 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008016 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008017 }
8018 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8019 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008020 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008021 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008022 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8023 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8024 }
8025 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8026 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8027 }
8028 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8029 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008030
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008031 // INSTANT AEC MODE
8032 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8033 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8034 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8035 }
8036
Shuzhen Wange763e802016-03-31 10:24:29 -07008037 // AF scene change
8038 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8039 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8040 }
8041
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008042 // Enable ZSL
8043 if (enableZsl != nullptr) {
8044 uint8_t value = *enableZsl ?
8045 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8046 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8047 }
8048
Xu Han821ea9c2017-05-23 09:00:40 -07008049 // OIS Data
8050 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8051 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8052 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8053 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8054 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8055 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8056 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8057 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8058 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8059 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8060 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8061 }
8062
Thierry Strudel3d639192016-09-09 11:52:26 -07008063 resultMetadata = camMetadata.release();
8064 return resultMetadata;
8065}
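/*
 * All of the translate functions in this file build their result with the
 * same CameraMetadata pattern. A minimal sketch of that pattern (tag and
 * value chosen purely for illustration):
 *
 *     CameraMetadata camMetadata;
 *     uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
 *     camMetadata.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
 *     // release() hands ownership of the raw camera_metadata_t* to the
 *     // caller, which eventually frees it with free_camera_metadata().
 *     camera_metadata_t *result = camMetadata.release();
 */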
8066
8067/*===========================================================================
8068 * FUNCTION : saveExifParams
8069 *
8070 * DESCRIPTION: cache 3A and stats EXIF debug parameters from the metadata callback
8071 *
8072 * PARAMETERS :
8073 * @metadata : metadata information from callback
8074 *
8075 * RETURN : none
8076 *
8077 *==========================================================================*/
8078void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8079{
8080 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8081 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8082 if (mExifParams.debug_params) {
8083 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8084 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8085 }
8086 }
8087 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8088 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8089 if (mExifParams.debug_params) {
8090 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8091 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8092 }
8093 }
8094 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8095 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8096 if (mExifParams.debug_params) {
8097 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8098 mExifParams.debug_params->af_debug_params_valid = TRUE;
8099 }
8100 }
8101 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8102 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8103 if (mExifParams.debug_params) {
8104 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8105 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8106 }
8107 }
8108 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8109 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8110 if (mExifParams.debug_params) {
8111 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8112 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8113 }
8114 }
8115 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8116 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8117 if (mExifParams.debug_params) {
8118 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8119 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8120 }
8121 }
8122 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8123 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8124 if (mExifParams.debug_params) {
8125 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8126 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8127 }
8128 }
8129 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8130 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8131 if (mExifParams.debug_params) {
8132 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8133 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8134 }
8135 }
8136}
8137
8138/*===========================================================================
8139 * FUNCTION : get3AExifParams
8140 *
8141 * DESCRIPTION: return the cached EXIF parameters, including the 3A debug data
8142 *
8143 * PARAMETERS : none
8144 *
8145 *
8146 * RETURN : mm_jpeg_exif_params_t
8147 *
8148 *==========================================================================*/
8149mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8150{
8151 return mExifParams;
8152}
8153
8154/*===========================================================================
8155 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8156 *
8157 * DESCRIPTION: translate urgent (partial result) metadata from HAL into framework format
8158 *
8159 * PARAMETERS :
8160 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008161 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8162 * urgent metadata in a batch. Always true for
8163 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008164 *
8165 * RETURN : camera_metadata_t*
8166 * metadata in a format specified by fwk
8167 *==========================================================================*/
8168camera_metadata_t*
8169QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008170 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008171{
8172 CameraMetadata camMetadata;
8173 camera_metadata_t *resultMetadata;
8174
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008175 if (!lastUrgentMetadataInBatch) {
8176 /* In batch mode, use empty metadata if this is not the last in batch
8177 */
8178 resultMetadata = allocate_camera_metadata(0, 0);
8179 return resultMetadata;
8180 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008181
8182 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8183 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8184 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8185 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8186 }
8187
8188 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8189 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8190 &aecTrigger->trigger, 1);
8191 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8192 &aecTrigger->trigger_id, 1);
8193 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8194 aecTrigger->trigger);
8195 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8196 aecTrigger->trigger_id);
8197 }
8198
8199 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8200 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8201 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8202 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8203 }
8204
Thierry Strudel3d639192016-09-09 11:52:26 -07008205 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8206 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8207 &af_trigger->trigger, 1);
8208 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8209 af_trigger->trigger);
8210 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8211 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8212 af_trigger->trigger_id);
8213 }
8214
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008215 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8216 /*af regions*/
8217 int32_t afRegions[REGIONS_TUPLE_COUNT];
8218        // Adjust the AF region from the sensor output coordinate system to the
8219        // active array coordinate system.
8220 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8221 hAfRegions->rect.width, hAfRegions->rect.height);
8222
8223 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8224 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8225 REGIONS_TUPLE_COUNT);
8226 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8227 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8228 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8229 hAfRegions->rect.height);
8230 }
8231
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008232 // AF region confidence
8233 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8234 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8235 }
8236
Thierry Strudel3d639192016-09-09 11:52:26 -07008237 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8238 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8239 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8240 if (NAME_NOT_FOUND != val) {
8241 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8242 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8243 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8244 } else {
8245 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8246 }
8247 }
8248
8249 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8250 uint32_t aeMode = CAM_AE_MODE_MAX;
8251 int32_t flashMode = CAM_FLASH_MODE_MAX;
8252 int32_t redeye = -1;
8253 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8254 aeMode = *pAeMode;
8255 }
8256 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8257 flashMode = *pFlashMode;
8258 }
8259 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8260 redeye = *pRedeye;
8261 }
8262
8263 if (1 == redeye) {
8264 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8265 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8266 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8267 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8268 flashMode);
8269 if (NAME_NOT_FOUND != val) {
8270 fwk_aeMode = (uint8_t)val;
8271 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8272 } else {
8273 LOGE("Unsupported flash mode %d", flashMode);
8274 }
8275 } else if (aeMode == CAM_AE_MODE_ON) {
8276 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8277 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8278 } else if (aeMode == CAM_AE_MODE_OFF) {
8279 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8280 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008281 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8282 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8283 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008284 } else {
8285 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8286 "flashMode:%d, aeMode:%u!!!",
8287 redeye, flashMode, aeMode);
8288 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008289 if (mInstantAEC) {
8290        // Increment the frame index count until a bound is reached for instant AEC.
8291 mInstantAecFrameIdxCount++;
8292 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8293 CAM_INTF_META_AEC_INFO, metadata) {
8294 LOGH("ae_params->settled = %d",ae_params->settled);
8295            // If AEC has settled, or the number of frames has reached the bound,
8296            // reset instant AEC.
8297 if (ae_params->settled ||
8298 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8299 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8300 mInstantAEC = false;
8301 mResetInstantAEC = true;
8302 mInstantAecFrameIdxCount = 0;
8303 }
8304 }
8305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008306 resultMetadata = camMetadata.release();
8307 return resultMetadata;
8308}
8309
8310/*===========================================================================
8311 * FUNCTION : dumpMetadataToFile
8312 *
8313 * DESCRIPTION: Dumps tuning metadata to file system
8314 *
8315 * PARAMETERS :
8316 * @meta : tuning metadata
8317 * @dumpFrameCount : current dump frame count
8318 * @enabled : Enable mask
8319 *
8320 *==========================================================================*/
8321void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8322 uint32_t &dumpFrameCount,
8323 bool enabled,
8324 const char *type,
8325 uint32_t frameNumber)
8326{
8327 //Some sanity checks
8328 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8329 LOGE("Tuning sensor data size bigger than expected %d: %d",
8330 meta.tuning_sensor_data_size,
8331 TUNING_SENSOR_DATA_MAX);
8332 return;
8333 }
8334
8335 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8336 LOGE("Tuning VFE data size bigger than expected %d: %d",
8337 meta.tuning_vfe_data_size,
8338 TUNING_VFE_DATA_MAX);
8339 return;
8340 }
8341
8342 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8343 LOGE("Tuning CPP data size bigger than expected %d: %d",
8344 meta.tuning_cpp_data_size,
8345 TUNING_CPP_DATA_MAX);
8346 return;
8347 }
8348
8349 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8350 LOGE("Tuning CAC data size bigger than expected %d: %d",
8351 meta.tuning_cac_data_size,
8352 TUNING_CAC_DATA_MAX);
8353 return;
8354 }
8355 //
8356
8357 if(enabled){
8358 char timeBuf[FILENAME_MAX];
8359 char buf[FILENAME_MAX];
8360 memset(buf, 0, sizeof(buf));
8361 memset(timeBuf, 0, sizeof(timeBuf));
8362 time_t current_time;
8363 struct tm * timeinfo;
8364 time (&current_time);
8365 timeinfo = localtime (&current_time);
8366 if (timeinfo != NULL) {
8367 strftime (timeBuf, sizeof(timeBuf),
8368 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8369 }
8370 String8 filePath(timeBuf);
8371 snprintf(buf,
8372 sizeof(buf),
8373 "%dm_%s_%d.bin",
8374 dumpFrameCount,
8375 type,
8376 frameNumber);
8377 filePath.append(buf);
8378 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8379 if (file_fd >= 0) {
8380 ssize_t written_len = 0;
8381 meta.tuning_data_version = TUNING_DATA_VERSION;
8382 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8383 written_len += write(file_fd, data, sizeof(uint32_t));
8384 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8385 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8386 written_len += write(file_fd, data, sizeof(uint32_t));
8387 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8388 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8389 written_len += write(file_fd, data, sizeof(uint32_t));
8390 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8391 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8392 written_len += write(file_fd, data, sizeof(uint32_t));
8393 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8394 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8395 written_len += write(file_fd, data, sizeof(uint32_t));
8396 meta.tuning_mod3_data_size = 0;
8397 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8398 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8399 written_len += write(file_fd, data, sizeof(uint32_t));
8400 size_t total_size = meta.tuning_sensor_data_size;
8401 data = (void *)((uint8_t *)&meta.data);
8402 written_len += write(file_fd, data, total_size);
8403 total_size = meta.tuning_vfe_data_size;
8404 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8405 written_len += write(file_fd, data, total_size);
8406 total_size = meta.tuning_cpp_data_size;
8407 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8408 written_len += write(file_fd, data, total_size);
8409 total_size = meta.tuning_cac_data_size;
8410 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8411 written_len += write(file_fd, data, total_size);
8412 close(file_fd);
8413        } else {
8414 LOGE("fail to open file for metadata dumping");
8415 }
8416 }
8417}
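/*
 * Layout of the tuning dump file written above, in the order the fields are
 * written (header fields are each a uint32_t in host byte order):
 *
 *   tuning_data_version
 *   tuning_sensor_data_size
 *   tuning_vfe_data_size
 *   tuning_cpp_data_size
 *   tuning_cac_data_size
 *   tuning_mod3_data_size   (always written as 0 here)
 *   sensor data             [tuning_sensor_data_size bytes]
 *   vfe data                [tuning_vfe_data_size bytes]
 *   cpp data                [tuning_cpp_data_size bytes]
 *   cac data                [tuning_cac_data_size bytes]
 */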
8418
8419/*===========================================================================
8420 * FUNCTION : cleanAndSortStreamInfo
8421 *
8422 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8423 *              and sort them so that raw streams are at the end of the list.
8424 *              This is a workaround for a camera daemon constraint.
8425 *
8426 * PARAMETERS : None
8427 *
8428 *==========================================================================*/
8429void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8430{
8431 List<stream_info_t *> newStreamInfo;
8432
8433 /*clean up invalid streams*/
8434 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8435 it != mStreamInfo.end();) {
8436 if(((*it)->status) == INVALID){
8437 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8438 delete channel;
8439 free(*it);
8440 it = mStreamInfo.erase(it);
8441 } else {
8442 it++;
8443 }
8444 }
8445
8446 // Move preview/video/callback/snapshot streams into newList
8447 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8448 it != mStreamInfo.end();) {
8449 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8450 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8451 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8452 newStreamInfo.push_back(*it);
8453 it = mStreamInfo.erase(it);
8454 } else
8455 it++;
8456 }
8457 // Move raw streams into newList
8458 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8459 it != mStreamInfo.end();) {
8460 newStreamInfo.push_back(*it);
8461 it = mStreamInfo.erase(it);
8462 }
8463
8464 mStreamInfo = newStreamInfo;
8465}
8466
8467/*===========================================================================
8468 * FUNCTION : extractJpegMetadata
8469 *
8470 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8471 * JPEG metadata is cached in HAL, and return as part of capture
8472 * result when metadata is returned from camera daemon.
8473 *
8474 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8475 * @request: capture request
8476 *
8477 *==========================================================================*/
8478void QCamera3HardwareInterface::extractJpegMetadata(
8479 CameraMetadata& jpegMetadata,
8480 const camera3_capture_request_t *request)
8481{
8482 CameraMetadata frame_settings;
8483 frame_settings = request->settings;
8484
8485 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8486 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8487 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8488 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8489
8490 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8491 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8492 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8493 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8494
8495 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8496 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8497 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8498 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8499
8500 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8501 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8502 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8503 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8504
8505 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8506 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8507 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8508 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8509
8510 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8511 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8512 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8513 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8514
8515 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8516 int32_t thumbnail_size[2];
8517 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8518 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8519 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8520 int32_t orientation =
8521 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008522 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008523 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8524 int32_t temp;
8525 temp = thumbnail_size[0];
8526 thumbnail_size[0] = thumbnail_size[1];
8527 thumbnail_size[1] = temp;
8528 }
8529 }
8530 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8531 thumbnail_size,
8532 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8533 }
8534
8535}
8536
8537/*===========================================================================
8538 * FUNCTION : convertToRegions
8539 *
8540 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8541 *
8542 * PARAMETERS :
8543 * @rect : cam_rect_t struct to convert
8544 * @region : int32_t destination array
8545 * @weight : if we are converting from cam_area_t, weight is valid
8546 * else weight = -1
8547 *
8548 *==========================================================================*/
8549void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8550 int32_t *region, int weight)
8551{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008552 region[FACE_LEFT] = rect.left;
8553 region[FACE_TOP] = rect.top;
8554 region[FACE_RIGHT] = rect.left + rect.width;
8555 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008556 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008557 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008558 }
8559}
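/*
 * Example: a cam_rect_t of (left=100, top=200, width=300, height=400) with
 * weight=1 is converted above into the framework tuple
 * [100, 200, 400, 600, 1], i.e. (xmin, ymin, xmax, ymax, weight).
 */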
8560
8561/*===========================================================================
8562 * FUNCTION : convertFromRegions
8563 *
8564 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8565 *
8566 * PARAMETERS :
8567 *   @roi            : cam_area_t destination
8568 *   @frame_settings : capture request settings to read the region tag from
8569 *   @tag            : metadata tag whose data is laid out as
8570 *                     [x_min, y_min, x_max, y_max, weight]
8571 *
8572 *==========================================================================*/
8573void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008574 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008575{
Thierry Strudel3d639192016-09-09 11:52:26 -07008576 int32_t x_min = frame_settings.find(tag).data.i32[0];
8577 int32_t y_min = frame_settings.find(tag).data.i32[1];
8578 int32_t x_max = frame_settings.find(tag).data.i32[2];
8579 int32_t y_max = frame_settings.find(tag).data.i32[3];
8580 roi.weight = frame_settings.find(tag).data.i32[4];
8581 roi.rect.left = x_min;
8582 roi.rect.top = y_min;
8583 roi.rect.width = x_max - x_min;
8584 roi.rect.height = y_max - y_min;
8585}
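/*
 * Example: a request entry of [100, 200, 400, 600, 1] under the given tag is
 * converted above into roi.rect = (left=100, top=200, width=300, height=400)
 * with roi.weight = 1, i.e. the inverse of convertToRegions().
 */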
8586
8587/*===========================================================================
8588 * FUNCTION : resetIfNeededROI
8589 *
8590 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8591 * crop region
8592 *
8593 * PARAMETERS :
8594 * @roi : cam_area_t struct to resize
8595 * @scalerCropRegion : cam_crop_region_t region to compare against
8596 *
8597 *
8598 *==========================================================================*/
8599bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8600 const cam_crop_region_t* scalerCropRegion)
8601{
8602 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8603 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8604 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8605 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8606
8607    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8608     * Without this check, the validation below (whether the roi lies inside the
8609     * scaler crop region) would fail, the roi would not be reset, and the
8610     * algorithm would keep using a stale roi window.
8611 */
8612 if (roi->weight == 0) {
8613 return true;
8614 }
8615
8616 if ((roi_x_max < scalerCropRegion->left) ||
8617        // right edge of roi window is left of scaler crop's left edge
8618        (roi_y_max < scalerCropRegion->top) ||
8619        // bottom edge of roi window is above scaler crop's top edge
8620        (roi->rect.left > crop_x_max) ||
8621        // left edge of roi window is right of scaler crop's right edge
8622        (roi->rect.top > crop_y_max)){
8623        // top edge of roi window is below scaler crop's bottom edge
8624 return false;
8625 }
8626 if (roi->rect.left < scalerCropRegion->left) {
8627 roi->rect.left = scalerCropRegion->left;
8628 }
8629 if (roi->rect.top < scalerCropRegion->top) {
8630 roi->rect.top = scalerCropRegion->top;
8631 }
8632 if (roi_x_max > crop_x_max) {
8633 roi_x_max = crop_x_max;
8634 }
8635 if (roi_y_max > crop_y_max) {
8636 roi_y_max = crop_y_max;
8637 }
8638 roi->rect.width = roi_x_max - roi->rect.left;
8639 roi->rect.height = roi_y_max - roi->rect.top;
8640 return true;
8641}
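/*
 * Example: with a scaler crop region of (left=1000, top=750, width=2000,
 * height=1500), an roi of (0, 0, 4000, 3000) with non-zero weight is clipped
 * above to (1000, 750, 2000, 1500) and true is returned. An roi with
 * weight == 0 is left untouched (true), while one lying entirely outside the
 * crop region is left untouched and false is returned.
 */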
8642
8643/*===========================================================================
8644 * FUNCTION : convertLandmarks
8645 *
8646 * DESCRIPTION: helper method to extract the landmarks from face detection info
8647 *
8648 * PARAMETERS :
8649 * @landmark_data : input landmark data to be converted
8650 * @landmarks : int32_t destination array
8651 *
8652 *
8653 *==========================================================================*/
8654void QCamera3HardwareInterface::convertLandmarks(
8655 cam_face_landmarks_info_t landmark_data,
8656 int32_t *landmarks)
8657{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008658 if (landmark_data.is_left_eye_valid) {
8659 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8660 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8661 } else {
8662 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8663 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8664 }
8665
8666 if (landmark_data.is_right_eye_valid) {
8667 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8668 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8669 } else {
8670 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8671 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8672 }
8673
8674 if (landmark_data.is_mouth_valid) {
8675 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8676 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8677 } else {
8678 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8679 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8680 }
8681}
8682
8683/*===========================================================================
8684 * FUNCTION : setInvalidLandmarks
8685 *
8686 * DESCRIPTION: helper method to set invalid landmarks
8687 *
8688 * PARAMETERS :
8689 * @landmarks : int32_t destination array
8690 *
8691 *
8692 *==========================================================================*/
8693void QCamera3HardwareInterface::setInvalidLandmarks(
8694 int32_t *landmarks)
8695{
8696 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8697 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8698 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8699 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8700 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8701 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008702}
8703
8704#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008705
8706/*===========================================================================
8707 * FUNCTION : getCapabilities
8708 *
8709 * DESCRIPTION: query camera capability from back-end
8710 *
8711 * PARAMETERS :
8712 * @ops : mm-interface ops structure
8713 * @cam_handle : camera handle for which we need capability
8714 *
8715 * RETURN : ptr type of capability structure
8716 * capability for success
8717 * NULL for failure
8718 *==========================================================================*/
8719cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8720 uint32_t cam_handle)
8721{
8722 int rc = NO_ERROR;
8723 QCamera3HeapMemory *capabilityHeap = NULL;
8724 cam_capability_t *cap_ptr = NULL;
8725
8726 if (ops == NULL) {
8727 LOGE("Invalid arguments");
8728 return NULL;
8729 }
8730
8731 capabilityHeap = new QCamera3HeapMemory(1);
8732 if (capabilityHeap == NULL) {
8733 LOGE("creation of capabilityHeap failed");
8734 return NULL;
8735 }
8736
8737 /* Allocate memory for capability buffer */
8738 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8739 if(rc != OK) {
8740        LOGE("No memory for capability");
8741 goto allocate_failed;
8742 }
8743
8744 /* Map memory for capability buffer */
8745 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8746
8747 rc = ops->map_buf(cam_handle,
8748 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8749 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8750 if(rc < 0) {
8751 LOGE("failed to map capability buffer");
8752 rc = FAILED_TRANSACTION;
8753 goto map_failed;
8754 }
8755
8756 /* Query Capability */
8757 rc = ops->query_capability(cam_handle);
8758 if(rc < 0) {
8759 LOGE("failed to query capability");
8760 rc = FAILED_TRANSACTION;
8761 goto query_failed;
8762 }
8763
8764 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8765 if (cap_ptr == NULL) {
8766 LOGE("out of memory");
8767 rc = NO_MEMORY;
8768 goto query_failed;
8769 }
8770
8771 memset(cap_ptr, 0, sizeof(cam_capability_t));
8772 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8773
8774 int index;
8775 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8776 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8777 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8778 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8779 }
8780
8781query_failed:
8782 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8783map_failed:
8784 capabilityHeap->deallocate();
8785allocate_failed:
8786 delete capabilityHeap;
8787
8788 if (rc != NO_ERROR) {
8789 return NULL;
8790 } else {
8791 return cap_ptr;
8792 }
8793}
8794
Thierry Strudel3d639192016-09-09 11:52:26 -07008795/*===========================================================================
8796 * FUNCTION : initCapabilities
8797 *
8798 * DESCRIPTION: initialize camera capabilities in static data struct
8799 *
8800 * PARAMETERS :
8801 * @cameraId : camera Id
8802 *
8803 * RETURN : int32_t type of status
8804 * NO_ERROR -- success
8805 * none-zero failure code
8806 *==========================================================================*/
8807int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8808{
8809 int rc = 0;
8810 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008811 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008812
8813 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8814 if (rc) {
8815 LOGE("camera_open failed. rc = %d", rc);
8816 goto open_failed;
8817 }
8818 if (!cameraHandle) {
8819 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8820 goto open_failed;
8821 }
8822
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008823 handle = get_main_camera_handle(cameraHandle->camera_handle);
8824 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8825 if (gCamCapability[cameraId] == NULL) {
8826 rc = FAILED_TRANSACTION;
8827 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008828 }
8829
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008830 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008831 if (is_dual_camera_by_idx(cameraId)) {
8832 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8833 gCamCapability[cameraId]->aux_cam_cap =
8834 getCapabilities(cameraHandle->ops, handle);
8835 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8836 rc = FAILED_TRANSACTION;
8837 free(gCamCapability[cameraId]);
8838 goto failed_op;
8839 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008840
8841 // Copy the main camera capability to main_cam_cap struct
8842 gCamCapability[cameraId]->main_cam_cap =
8843 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8844 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8845 LOGE("out of memory");
8846 rc = NO_MEMORY;
8847 goto failed_op;
8848 }
8849 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8850 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008851 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008852failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008853 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8854 cameraHandle = NULL;
8855open_failed:
8856 return rc;
8857}
8858
8859/*==========================================================================
8860 * FUNCTION   : get3AVersion
8861 *
8862 * DESCRIPTION: get the Q3A S/W version
8863 *
8864 * PARAMETERS :
8865 * @sw_version: Reference of Q3A structure which will hold version info upon
8866 * return
8867 *
8868 * RETURN : None
8869 *
8870 *==========================================================================*/
8871void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8872{
8873 if(gCamCapability[mCameraId])
8874 sw_version = gCamCapability[mCameraId]->q3a_version;
8875 else
8876 LOGE("Capability structure NULL!");
8877}
8878
8879
8880/*===========================================================================
8881 * FUNCTION : initParameters
8882 *
8883 * DESCRIPTION: initialize camera parameters
8884 *
8885 * PARAMETERS :
8886 *
8887 * RETURN : int32_t type of status
8888 * NO_ERROR -- success
8889 * none-zero failure code
8890 *==========================================================================*/
8891int QCamera3HardwareInterface::initParameters()
8892{
8893 int rc = 0;
8894
8895 //Allocate Set Param Buffer
8896 mParamHeap = new QCamera3HeapMemory(1);
8897 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8898 if(rc != OK) {
8899 rc = NO_MEMORY;
8900 LOGE("Failed to allocate SETPARM Heap memory");
8901 delete mParamHeap;
8902 mParamHeap = NULL;
8903 return rc;
8904 }
8905
8906 //Map memory for parameters buffer
8907 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8908 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8909 mParamHeap->getFd(0),
8910 sizeof(metadata_buffer_t),
8911 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8912 if(rc < 0) {
8913 LOGE("failed to map SETPARM buffer");
8914 rc = FAILED_TRANSACTION;
8915 mParamHeap->deallocate();
8916 delete mParamHeap;
8917 mParamHeap = NULL;
8918 return rc;
8919 }
8920
8921 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8922
8923 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8924 return rc;
8925}
8926
8927/*===========================================================================
8928 * FUNCTION : deinitParameters
8929 *
8930 * DESCRIPTION: de-initialize camera parameters
8931 *
8932 * PARAMETERS :
8933 *
8934 * RETURN : NONE
8935 *==========================================================================*/
8936void QCamera3HardwareInterface::deinitParameters()
8937{
8938 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8939 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8940
8941 mParamHeap->deallocate();
8942 delete mParamHeap;
8943 mParamHeap = NULL;
8944
8945 mParameters = NULL;
8946
8947 free(mPrevParameters);
8948 mPrevParameters = NULL;
8949}
8950
8951/*===========================================================================
8952 * FUNCTION : calcMaxJpegSize
8953 *
8954 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8955 *
8956 * PARAMETERS :
8957 *
8958 * RETURN : max_jpeg_size
8959 *==========================================================================*/
8960size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8961{
8962 size_t max_jpeg_size = 0;
8963 size_t temp_width, temp_height;
8964 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8965 MAX_SIZES_CNT);
8966 for (size_t i = 0; i < count; i++) {
8967 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8968 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8969 if (temp_width * temp_height > max_jpeg_size ) {
8970 max_jpeg_size = temp_width * temp_height;
8971 }
8972 }
8973 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8974 return max_jpeg_size;
8975}
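/*
 * Example: for a camera whose largest picture size is 4000x3000 (an
 * illustrative value, not read from the capability table), the result above
 * is 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t), i.e. 18000000 bytes
 * plus the blob header, roughly 18 MB.
 */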
8976
8977/*===========================================================================
8978 * FUNCTION : getMaxRawSize
8979 *
8980 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8981 *
8982 * PARAMETERS :
8983 *
8984 * RETURN : Largest supported Raw Dimension
8985 *==========================================================================*/
8986cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8987{
8988 int max_width = 0;
8989 cam_dimension_t maxRawSize;
8990
8991 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8992 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8993 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8994 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8995 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8996 }
8997 }
8998 return maxRawSize;
8999}
9000
9001
9002/*===========================================================================
9003 * FUNCTION : calcMaxJpegDim
9004 *
9005 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9006 *
9007 * PARAMETERS :
9008 *
9009 * RETURN : max_jpeg_dim
9010 *==========================================================================*/
9011cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9012{
9013 cam_dimension_t max_jpeg_dim;
9014 cam_dimension_t curr_jpeg_dim;
9015 max_jpeg_dim.width = 0;
9016 max_jpeg_dim.height = 0;
9017 curr_jpeg_dim.width = 0;
9018 curr_jpeg_dim.height = 0;
9019 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9020 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9021 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9022 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9023 max_jpeg_dim.width * max_jpeg_dim.height ) {
9024 max_jpeg_dim.width = curr_jpeg_dim.width;
9025 max_jpeg_dim.height = curr_jpeg_dim.height;
9026 }
9027 }
9028 return max_jpeg_dim;
9029}
9030
9031/*===========================================================================
9032 * FUNCTION : addStreamConfig
9033 *
9034 * DESCRIPTION: adds the stream configuration to the array
9035 *
9036 * PARAMETERS :
9037 * @available_stream_configs : pointer to stream configuration array
9038 * @scalar_format : scalar format
9039 * @dim : configuration dimension
9040 * @config_type : input or output configuration type
9041 *
9042 * RETURN : NONE
9043 *==========================================================================*/
9044void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9045 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9046{
9047 available_stream_configs.add(scalar_format);
9048 available_stream_configs.add(dim.width);
9049 available_stream_configs.add(dim.height);
9050 available_stream_configs.add(config_type);
9051}
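/*
 * Example: a call such as (format, size and type chosen for illustration)
 *
 *     cam_dimension_t dim = {1920, 1080};
 *     addStreamConfig(available_stream_configs, HAL_PIXEL_FORMAT_YCbCr_420_888,
 *             dim, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *
 * appends the quadruple [format, 1920, 1080, OUTPUT] to the vector, which is
 * later published as a flattened list of such (format, width, height,
 * direction) entries.
 */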
9052
9053/*===========================================================================
9054 * FUNCTION   : supportBurstCapture
9055 *
9056 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9057 *
9058 * PARAMETERS :
9059 * @cameraId : camera Id
9060 *
9061 * RETURN : true if camera supports BURST_CAPTURE
9062 * false otherwise
9063 *==========================================================================*/
9064bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9065{
9066 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9067 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9068 const int32_t highResWidth = 3264;
9069 const int32_t highResHeight = 2448;
9070
9071 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9072 // Maximum resolution images cannot be captured at >= 10fps
9073 // -> not supporting BURST_CAPTURE
9074 return false;
9075 }
9076
9077 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9078 // Maximum resolution images can be captured at >= 20fps
9079 // --> supporting BURST_CAPTURE
9080 return true;
9081 }
9082
9083 // Find the smallest highRes resolution, or largest resolution if there is none
9084 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9085 MAX_SIZES_CNT);
9086 size_t highRes = 0;
9087 while ((highRes + 1 < totalCnt) &&
9088 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9089 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9090 highResWidth * highResHeight)) {
9091 highRes++;
9092 }
9093 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9094 return true;
9095 } else {
9096 return false;
9097 }
9098}
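/*
 * Example (durations are illustrative only): a sensor whose full-resolution
 * minimum frame duration is 40000000 ns (25 fps) satisfies the 50 ms bound
 * above, so BURST_CAPTURE is advertised. One that needs 120000000 ns (~8 fps)
 * at full resolution exceeds the 100 ms bound and is rejected immediately,
 * before the high-resolution table is even searched.
 */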
9099
9100/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009101 * FUNCTION : getPDStatIndex
9102 *
9103 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9104 *
9105 * PARAMETERS :
9106 * @caps : camera capabilities
9107 *
9108 * RETURN : int32_t type
9109 * non-negative - on success
9110 * -1 - on failure
9111 *==========================================================================*/
9112int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9113 if (nullptr == caps) {
9114 return -1;
9115 }
9116
9117 uint32_t metaRawCount = caps->meta_raw_channel_count;
9118 int32_t ret = -1;
9119 for (size_t i = 0; i < metaRawCount; i++) {
9120 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9121 ret = i;
9122 break;
9123 }
9124 }
9125
9126 return ret;
9127}
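
// Illustrative sketch (guarded out of the build): getPDStatIndex() is a
// find-first-index scan over the meta raw sub-format table. The same shape
// against a plain array; names are examples only, not HAL symbols.
#if 0
static int32_t findFirstIndex(const int *fmts, size_t cnt, int wanted)
{
    for (size_t i = 0; i < cnt; i++) {
        if (fmts[i] == wanted) {
            return (int32_t)i; // first match wins
        }
    }
    return -1; // not present
}
#endif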
9128
9129/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009130 * FUNCTION : initStaticMetadata
9131 *
9132 * DESCRIPTION: initialize the static metadata
9133 *
9134 * PARAMETERS :
9135 * @cameraId : camera Id
9136 *
9137 * RETURN : int32_t type of status
9138 * 0 -- success
9139 * non-zero failure code
9140 *==========================================================================*/
9141int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9142{
9143 int rc = 0;
9144 CameraMetadata staticInfo;
9145 size_t count = 0;
9146 bool limitedDevice = false;
9147 char prop[PROPERTY_VALUE_MAX];
9148 bool supportBurst = false;
9149
9150 supportBurst = supportBurstCapture(cameraId);
9151
9152    /* If the sensor is a YUV sensor (no raw support), or per-frame control is not
9153     * guaranteed, or the min fps at max resolution is less than 20 fps, the device
9154     * is advertised as a LIMITED device */
9155 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9156 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9157 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9158 !supportBurst;
9159
9160 uint8_t supportedHwLvl = limitedDevice ?
9161 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009162#ifndef USE_HAL_3_3
9163 // LEVEL_3 - This device will support level 3.
9164 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9165#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009166 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009167#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009168
9169 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9170 &supportedHwLvl, 1);
9171
9172 bool facingBack = false;
9173 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9174 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9175 facingBack = true;
9176 }
9177 /*HAL 3 only*/
9178 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9179 &gCamCapability[cameraId]->min_focus_distance, 1);
9180
9181 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9182 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9183
9184    /* should be using focal lengths, but the sensor doesn't provide that info for now */
9185 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9186 &gCamCapability[cameraId]->focal_length,
9187 1);
9188
9189 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9190 gCamCapability[cameraId]->apertures,
9191 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9192
9193 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9194 gCamCapability[cameraId]->filter_densities,
9195 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9196
9197
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009198 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9199 size_t mode_count =
9200 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9201 for (size_t i = 0; i < mode_count; i++) {
9202 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009204 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009205 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009206
9207 int32_t lens_shading_map_size[] = {
9208 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9209 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9210 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9211 lens_shading_map_size,
9212 sizeof(lens_shading_map_size)/sizeof(int32_t));
9213
9214 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9215 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9216
9217 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9218 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9219
9220 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9221 &gCamCapability[cameraId]->max_frame_duration, 1);
9222
9223 camera_metadata_rational baseGainFactor = {
9224 gCamCapability[cameraId]->base_gain_factor.numerator,
9225 gCamCapability[cameraId]->base_gain_factor.denominator};
9226 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9227 &baseGainFactor, 1);
9228
9229 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9230 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9231
9232 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9233 gCamCapability[cameraId]->pixel_array_size.height};
9234 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9235 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9236
9237 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9238 gCamCapability[cameraId]->active_array_size.top,
9239 gCamCapability[cameraId]->active_array_size.width,
9240 gCamCapability[cameraId]->active_array_size.height};
9241 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9242 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9243
9244 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9245 &gCamCapability[cameraId]->white_level, 1);
9246
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009247 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9248 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9249 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009250 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009251 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009252
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009253#ifndef USE_HAL_3_3
9254 bool hasBlackRegions = false;
9255 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9256 LOGW("black_region_count: %d is bounded to %d",
9257 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9258 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9259 }
9260 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9261 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9262 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9263 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9264 }
9265 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9266 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9267 hasBlackRegions = true;
9268 }
9269#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009270 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9271 &gCamCapability[cameraId]->flash_charge_duration, 1);
9272
9273 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9274 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9275
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009276 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9277 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9278 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9280 &timestampSource, 1);
9281
Thierry Strudel54dc9782017-02-15 12:12:10 -08009282 //update histogram vendor data
9283 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009284 &gCamCapability[cameraId]->histogram_size, 1);
9285
Thierry Strudel54dc9782017-02-15 12:12:10 -08009286 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009287 &gCamCapability[cameraId]->max_histogram_count, 1);
9288
Shuzhen Wang14415f52016-11-16 18:26:18 -08009289 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9290    //so that the app can request a smaller number of bins than the maximum supported.
9291 std::vector<int32_t> histBins;
9292 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9293 histBins.push_back(maxHistBins);
9294 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9295 (maxHistBins & 0x1) == 0) {
9296 histBins.push_back(maxHistBins >> 1);
9297 maxHistBins >>= 1;
9298 }
9299 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9300 histBins.data(), histBins.size());
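
    // Illustrative sketch (guarded out of the build): the loop above publishes
    // successive halvings of the maximum bin count, stopping at the minimum
    // supported size or at the first odd value. For example, assuming a maximum
    // of 256 bins and a minimum of 32 (values here are illustrative, not the
    // actual HAL constants), the advertised list would be:
#if 0
    static const int32_t kExampleHistBins[] = { 256, 128, 64, 32 };
#endif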
9301
Thierry Strudel3d639192016-09-09 11:52:26 -07009302 int32_t sharpness_map_size[] = {
9303 gCamCapability[cameraId]->sharpness_map_size.width,
9304 gCamCapability[cameraId]->sharpness_map_size.height};
9305
9306 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9307 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9308
9309 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9310 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9311
Emilian Peev0f3c3162017-03-15 12:57:46 +00009312 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9313 if (0 <= indexPD) {
9314 // Advertise PD stats data as part of the Depth capabilities
9315 int32_t depthWidth =
9316 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9317 int32_t depthHeight =
9318 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009319 int32_t depthStride =
9320 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009321 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9322 assert(0 < depthSamplesCount);
9323 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9324 &depthSamplesCount, 1);
9325
9326 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9327 depthHeight,
9328 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9329 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9330 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9331 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9332 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9333
9334 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9335 depthHeight, 33333333,
9336 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9337 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9338 depthMinDuration,
9339 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9340
9341 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9342 depthHeight, 0,
9343 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9344 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9345 depthStallDuration,
9346 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9347
9348 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9349 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009350
9351 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9352 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9353 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009354 }
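
    // Illustrative sketch (guarded out of the build): the PD block above derives
    // its depth sizes from the RAW16 PD buffer geometry: the stride is
    // width * 2 bytes (16-bit samples per row) and the BLOB sample count is
    // (width * height * 2) / 16, i.e. the PD payload bytes grouped 16 per sample.
    // Standalone arithmetic with example values (the 640x480 size is arbitrary,
    // not a real sensor dimension):
#if 0
    {
        int32_t pdWidth = 640, pdHeight = 480;
        int32_t strideBytes = pdWidth * 2;                    // 1280
        int32_t samplesCount = (pdWidth * pdHeight * 2) / 16; // 38400
        (void)strideBytes; (void)samplesCount;
    }
#endif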
9355
Thierry Strudel3d639192016-09-09 11:52:26 -07009356 int32_t scalar_formats[] = {
9357 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9358 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9359 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9360 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9361 HAL_PIXEL_FORMAT_RAW10,
9362 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009363 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9364 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9365 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009366
9367 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9368 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9369 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9370 count, MAX_SIZES_CNT, available_processed_sizes);
9371 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9372 available_processed_sizes, count * 2);
9373
9374 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9375 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9376 makeTable(gCamCapability[cameraId]->raw_dim,
9377 count, MAX_SIZES_CNT, available_raw_sizes);
9378 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9379 available_raw_sizes, count * 2);
9380
9381 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9382 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9383 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9384 count, MAX_SIZES_CNT, available_fps_ranges);
9385 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9386 available_fps_ranges, count * 2);
9387
9388 camera_metadata_rational exposureCompensationStep = {
9389 gCamCapability[cameraId]->exp_compensation_step.numerator,
9390 gCamCapability[cameraId]->exp_compensation_step.denominator};
9391 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9392 &exposureCompensationStep, 1);
9393
9394 Vector<uint8_t> availableVstabModes;
9395 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9396 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009397 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009398 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009399 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009400 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009401 count = IS_TYPE_MAX;
9402 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9403 for (size_t i = 0; i < count; i++) {
9404 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9405 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9406 eisSupported = true;
9407 break;
9408 }
9409 }
9410 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009411 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9412 }
9413 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9414 availableVstabModes.array(), availableVstabModes.size());
9415
9416 /*HAL 1 and HAL 3 common*/
9417 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9418 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9419 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009420 // Cap the max zoom to the max preferred value
9421 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009422 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9423 &maxZoom, 1);
9424
9425 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9426 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9427
9428 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9429 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9430 max3aRegions[2] = 0; /* AF not supported */
9431 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9432 max3aRegions, 3);
9433
9434 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9435 memset(prop, 0, sizeof(prop));
9436 property_get("persist.camera.facedetect", prop, "1");
9437 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9438 LOGD("Support face detection mode: %d",
9439 supportedFaceDetectMode);
9440
9441 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009442    /* supported mode should be OFF if the max number of faces is 0 */
9443 if (maxFaces <= 0) {
9444 supportedFaceDetectMode = 0;
9445 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009446 Vector<uint8_t> availableFaceDetectModes;
9447 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9448 if (supportedFaceDetectMode == 1) {
9449 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9450 } else if (supportedFaceDetectMode == 2) {
9451 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9452 } else if (supportedFaceDetectMode == 3) {
9453 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9454 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9455 } else {
9456 maxFaces = 0;
9457 }
9458 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9459 availableFaceDetectModes.array(),
9460 availableFaceDetectModes.size());
9461 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9462 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009463 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9464 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9465 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009466
9467 int32_t exposureCompensationRange[] = {
9468 gCamCapability[cameraId]->exposure_compensation_min,
9469 gCamCapability[cameraId]->exposure_compensation_max};
9470 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9471 exposureCompensationRange,
9472 sizeof(exposureCompensationRange)/sizeof(int32_t));
9473
9474 uint8_t lensFacing = (facingBack) ?
9475 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9476 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9477
9478 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9479 available_thumbnail_sizes,
9480 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9481
9482 /*all sizes will be clubbed into this tag*/
9483 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9484 /*android.scaler.availableStreamConfigurations*/
9485 Vector<int32_t> available_stream_configs;
9486 cam_dimension_t active_array_dim;
9487 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9488 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009489
9490    /* Advertise the list of supported input dimensions based on the property below.
9491    By default all sizes up to 5MP will be advertised.
9492    Note that the setprop resolution format should be WxH,
9493    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9494    To list all supported sizes, the property needs to be set to "0x0" */
9495 cam_dimension_t minInputSize = {2592,1944}; //5MP
9496 memset(prop, 0, sizeof(prop));
9497 property_get("persist.camera.input.minsize", prop, "2592x1944");
9498 if (strlen(prop) > 0) {
9499 char *saveptr = NULL;
9500 char *token = strtok_r(prop, "x", &saveptr);
9501 if (token != NULL) {
9502 minInputSize.width = atoi(token);
9503 }
9504 token = strtok_r(NULL, "x", &saveptr);
9505 if (token != NULL) {
9506 minInputSize.height = atoi(token);
9507 }
9508 }
9509
Thierry Strudel3d639192016-09-09 11:52:26 -07009510    /* Add input/output stream configurations for each scalar format */
9511 for (size_t j = 0; j < scalar_formats_count; j++) {
9512 switch (scalar_formats[j]) {
9513 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9514 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9515 case HAL_PIXEL_FORMAT_RAW10:
9516 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9517 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9518 addStreamConfig(available_stream_configs, scalar_formats[j],
9519 gCamCapability[cameraId]->raw_dim[i],
9520 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9521 }
9522 break;
9523 case HAL_PIXEL_FORMAT_BLOB:
9524 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9525 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9526 addStreamConfig(available_stream_configs, scalar_formats[j],
9527 gCamCapability[cameraId]->picture_sizes_tbl[i],
9528 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9529 }
9530 break;
9531 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9532 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9533 default:
9534 cam_dimension_t largest_picture_size;
9535 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9536 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9537 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9538 addStreamConfig(available_stream_configs, scalar_formats[j],
9539 gCamCapability[cameraId]->picture_sizes_tbl[i],
9540 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009541                /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009542 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9543 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009544 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9545 >= minInputSize.width) || (gCamCapability[cameraId]->
9546 picture_sizes_tbl[i].height >= minInputSize.height)) {
9547 addStreamConfig(available_stream_configs, scalar_formats[j],
9548 gCamCapability[cameraId]->picture_sizes_tbl[i],
9549 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9550 }
9551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009552 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009553
Thierry Strudel3d639192016-09-09 11:52:26 -07009554 break;
9555 }
9556 }
9557
9558 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9559 available_stream_configs.array(), available_stream_configs.size());
9560 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9561 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9562
9563 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9564 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9565
9566 /* android.scaler.availableMinFrameDurations */
9567 Vector<int64_t> available_min_durations;
9568 for (size_t j = 0; j < scalar_formats_count; j++) {
9569 switch (scalar_formats[j]) {
9570 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9571 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9572 case HAL_PIXEL_FORMAT_RAW10:
9573 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9574 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9575 available_min_durations.add(scalar_formats[j]);
9576 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9577 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9578 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9579 }
9580 break;
9581 default:
9582 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9583 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9584 available_min_durations.add(scalar_formats[j]);
9585 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9586 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9587 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9588 }
9589 break;
9590 }
9591 }
9592 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9593 available_min_durations.array(), available_min_durations.size());
9594
9595 Vector<int32_t> available_hfr_configs;
9596 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9597 int32_t fps = 0;
9598 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9599 case CAM_HFR_MODE_60FPS:
9600 fps = 60;
9601 break;
9602 case CAM_HFR_MODE_90FPS:
9603 fps = 90;
9604 break;
9605 case CAM_HFR_MODE_120FPS:
9606 fps = 120;
9607 break;
9608 case CAM_HFR_MODE_150FPS:
9609 fps = 150;
9610 break;
9611 case CAM_HFR_MODE_180FPS:
9612 fps = 180;
9613 break;
9614 case CAM_HFR_MODE_210FPS:
9615 fps = 210;
9616 break;
9617 case CAM_HFR_MODE_240FPS:
9618 fps = 240;
9619 break;
9620 case CAM_HFR_MODE_480FPS:
9621 fps = 480;
9622 break;
9623 case CAM_HFR_MODE_OFF:
9624 case CAM_HFR_MODE_MAX:
9625 default:
9626 break;
9627 }
9628
9629 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9630 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9631            /* For each HFR frame rate, we need to advertise one variable fps range
9632             * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise [30, 120]
9633 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9634 * set by the app. When video recording is started, [120, 120] is
9635 * set. This way sensor configuration does not change when recording
9636 * is started */
9637
9638 /* (width, height, fps_min, fps_max, batch_size_max) */
9639 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9640 j < MAX_SIZES_CNT; j++) {
9641 available_hfr_configs.add(
9642 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9643 available_hfr_configs.add(
9644 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9645 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9646 available_hfr_configs.add(fps);
9647 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9648
9649 /* (width, height, fps_min, fps_max, batch_size_max) */
9650 available_hfr_configs.add(
9651 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9652 available_hfr_configs.add(
9653 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9654 available_hfr_configs.add(fps);
9655 available_hfr_configs.add(fps);
9656 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9657 }
9658 }
9659 }
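
    // Illustrative sketch (guarded out of the build): each HFR dimension above
    // contributes two 5-tuples, (width, height, fps_min, fps_max, batch_size_max).
    // Using the 120 fps case from the comment above with a 30 fps preview rate,
    // one 1920x1080 entry (the size is an arbitrary example) expands to:
#if 0
    static const int32_t kExampleHfrConfigs[] = {
        1920, 1080,  30, 120, 4, // variable range [30, 120], batch = 120 / 30
        1920, 1080, 120, 120, 4, // fixed range [120, 120] once recording starts
    };
#endif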
9660 //Advertise HFR capability only if the property is set
9661 memset(prop, 0, sizeof(prop));
9662 property_get("persist.camera.hal3hfr.enable", prop, "1");
9663 uint8_t hfrEnable = (uint8_t)atoi(prop);
9664
9665 if(hfrEnable && available_hfr_configs.array()) {
9666 staticInfo.update(
9667 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9668 available_hfr_configs.array(), available_hfr_configs.size());
9669 }
9670
9671 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9672 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9673 &max_jpeg_size, 1);
9674
9675 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9676 size_t size = 0;
9677 count = CAM_EFFECT_MODE_MAX;
9678 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9679 for (size_t i = 0; i < count; i++) {
9680 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9681 gCamCapability[cameraId]->supported_effects[i]);
9682 if (NAME_NOT_FOUND != val) {
9683 avail_effects[size] = (uint8_t)val;
9684 size++;
9685 }
9686 }
9687 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9688 avail_effects,
9689 size);
9690
9691 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9692 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9693 size_t supported_scene_modes_cnt = 0;
9694 count = CAM_SCENE_MODE_MAX;
9695 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9696 for (size_t i = 0; i < count; i++) {
9697 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9698 CAM_SCENE_MODE_OFF) {
9699 int val = lookupFwkName(SCENE_MODES_MAP,
9700 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9701 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009702
Thierry Strudel3d639192016-09-09 11:52:26 -07009703 if (NAME_NOT_FOUND != val) {
9704 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9705 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9706 supported_scene_modes_cnt++;
9707 }
9708 }
9709 }
9710 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9711 avail_scene_modes,
9712 supported_scene_modes_cnt);
9713
9714 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9715 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9716 supported_scene_modes_cnt,
9717 CAM_SCENE_MODE_MAX,
9718 scene_mode_overrides,
9719 supported_indexes,
9720 cameraId);
9721
9722 if (supported_scene_modes_cnt == 0) {
9723 supported_scene_modes_cnt = 1;
9724 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9725 }
9726
9727 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9728 scene_mode_overrides, supported_scene_modes_cnt * 3);
9729
9730 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9731 ANDROID_CONTROL_MODE_AUTO,
9732 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9733 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9734 available_control_modes,
9735 3);
9736
9737 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9738 size = 0;
9739 count = CAM_ANTIBANDING_MODE_MAX;
9740 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9741 for (size_t i = 0; i < count; i++) {
9742 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9743 gCamCapability[cameraId]->supported_antibandings[i]);
9744 if (NAME_NOT_FOUND != val) {
9745 avail_antibanding_modes[size] = (uint8_t)val;
9746 size++;
9747 }
9748
9749 }
9750 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9751 avail_antibanding_modes,
9752 size);
9753
9754 uint8_t avail_abberation_modes[] = {
9755 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9756 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9757 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9758 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9759 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9760 if (0 == count) {
9761        // If no aberration correction modes are available for a device, advertise only the OFF mode
9762 size = 1;
9763 } else {
9764        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9765        // So, advertise all 3 modes if at least one mode is supported, as per the
9766        // new M requirement
9767 size = 3;
9768 }
9769 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9770 avail_abberation_modes,
9771 size);
9772
9773 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9774 size = 0;
9775 count = CAM_FOCUS_MODE_MAX;
9776 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9777 for (size_t i = 0; i < count; i++) {
9778 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9779 gCamCapability[cameraId]->supported_focus_modes[i]);
9780 if (NAME_NOT_FOUND != val) {
9781 avail_af_modes[size] = (uint8_t)val;
9782 size++;
9783 }
9784 }
9785 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9786 avail_af_modes,
9787 size);
9788
9789 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9790 size = 0;
9791 count = CAM_WB_MODE_MAX;
9792 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9793 for (size_t i = 0; i < count; i++) {
9794 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9795 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9796 gCamCapability[cameraId]->supported_white_balances[i]);
9797 if (NAME_NOT_FOUND != val) {
9798 avail_awb_modes[size] = (uint8_t)val;
9799 size++;
9800 }
9801 }
9802 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9803 avail_awb_modes,
9804 size);
9805
9806 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9807 count = CAM_FLASH_FIRING_LEVEL_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9809 count);
9810 for (size_t i = 0; i < count; i++) {
9811 available_flash_levels[i] =
9812 gCamCapability[cameraId]->supported_firing_levels[i];
9813 }
9814 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9815 available_flash_levels, count);
9816
9817 uint8_t flashAvailable;
9818 if (gCamCapability[cameraId]->flash_available)
9819 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9820 else
9821 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9822 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9823 &flashAvailable, 1);
9824
9825 Vector<uint8_t> avail_ae_modes;
9826 count = CAM_AE_MODE_MAX;
9827 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9828 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009829 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9830 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9831 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9832 }
9833 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009834 }
9835 if (flashAvailable) {
9836 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9837 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9838 }
9839 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9840 avail_ae_modes.array(),
9841 avail_ae_modes.size());
9842
9843 int32_t sensitivity_range[2];
9844 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9845 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9846 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9847 sensitivity_range,
9848 sizeof(sensitivity_range) / sizeof(int32_t));
9849
9850 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9851 &gCamCapability[cameraId]->max_analog_sensitivity,
9852 1);
9853
9854 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9855 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9856 &sensor_orientation,
9857 1);
9858
9859 int32_t max_output_streams[] = {
9860 MAX_STALLING_STREAMS,
9861 MAX_PROCESSED_STREAMS,
9862 MAX_RAW_STREAMS};
9863 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9864 max_output_streams,
9865 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9866
9867 uint8_t avail_leds = 0;
9868 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9869 &avail_leds, 0);
9870
9871 uint8_t focus_dist_calibrated;
9872 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9873 gCamCapability[cameraId]->focus_dist_calibrated);
9874 if (NAME_NOT_FOUND != val) {
9875 focus_dist_calibrated = (uint8_t)val;
9876 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9877 &focus_dist_calibrated, 1);
9878 }
9879
9880 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9881 size = 0;
9882 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9883 MAX_TEST_PATTERN_CNT);
9884 for (size_t i = 0; i < count; i++) {
9885 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9886 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9887 if (NAME_NOT_FOUND != testpatternMode) {
9888 avail_testpattern_modes[size] = testpatternMode;
9889 size++;
9890 }
9891 }
9892 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9893 avail_testpattern_modes,
9894 size);
9895
9896 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9897 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9898 &max_pipeline_depth,
9899 1);
9900
9901 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9902 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9903 &partial_result_count,
9904 1);
9905
9906 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9907 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9908
9909 Vector<uint8_t> available_capabilities;
9910 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9911 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9912 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9913 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9914 if (supportBurst) {
9915 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9916 }
9917 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9918 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9919 if (hfrEnable && available_hfr_configs.array()) {
9920 available_capabilities.add(
9921 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9922 }
9923
9924 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9925 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9926 }
9927 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9928 available_capabilities.array(),
9929 available_capabilities.size());
9930
9931    //aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9932    //The assumption is that all bayer cameras support MANUAL_SENSOR.
9933 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9934 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9935
9936 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9937 &aeLockAvailable, 1);
9938
9939    //awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
9940    //BURST_CAPTURE. The assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9941 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9942 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9943
9944 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9945 &awbLockAvailable, 1);
9946
9947 int32_t max_input_streams = 1;
9948 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9949 &max_input_streams,
9950 1);
9951
9952 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9953 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9954 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9955 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9956 HAL_PIXEL_FORMAT_YCbCr_420_888};
9957 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9958 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
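
    // Illustrative sketch (guarded out of the build): the map is a repeated
    // variable-length record of (input_format, num_output_formats,
    // output_format_1 .. output_format_N). A walk over the flattened layout:
#if 0
    {
        size_t len = sizeof(io_format_map) / sizeof(io_format_map[0]);
        for (size_t i = 0; i + 1 < len; ) {
            int32_t inputFormat = io_format_map[i++];
            int32_t numOutputs = io_format_map[i++];
            for (int32_t j = 0; (j < numOutputs) && (i < len); j++) {
                int32_t outputFormat = io_format_map[i++];
                (void)inputFormat; (void)outputFormat; // one input->output pairing
            }
        }
    }
#endif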
9959
9960 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9961 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9962 &max_latency,
9963 1);
9964
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009965#ifndef USE_HAL_3_3
9966 int32_t isp_sensitivity_range[2];
9967 isp_sensitivity_range[0] =
9968 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9969 isp_sensitivity_range[1] =
9970 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9971 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9972 isp_sensitivity_range,
9973 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9974#endif
9975
Thierry Strudel3d639192016-09-09 11:52:26 -07009976 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9977 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9978 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9979 available_hot_pixel_modes,
9980 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9981
9982 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9983 ANDROID_SHADING_MODE_FAST,
9984 ANDROID_SHADING_MODE_HIGH_QUALITY};
9985 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9986 available_shading_modes,
9987 3);
9988
9989 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9990 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9991 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9992 available_lens_shading_map_modes,
9993 2);
9994
9995 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9996 ANDROID_EDGE_MODE_FAST,
9997 ANDROID_EDGE_MODE_HIGH_QUALITY,
9998 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9999 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10000 available_edge_modes,
10001 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10002
10003 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10004 ANDROID_NOISE_REDUCTION_MODE_FAST,
10005 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10006 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10007 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10008 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10009 available_noise_red_modes,
10010 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10011
10012 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10013 ANDROID_TONEMAP_MODE_FAST,
10014 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10015 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10016 available_tonemap_modes,
10017 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10018
10019 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10020 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10021 available_hot_pixel_map_modes,
10022 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10023
10024 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10025 gCamCapability[cameraId]->reference_illuminant1);
10026 if (NAME_NOT_FOUND != val) {
10027 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10028 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10029 }
10030
10031 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10032 gCamCapability[cameraId]->reference_illuminant2);
10033 if (NAME_NOT_FOUND != val) {
10034 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10035 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10036 }
10037
10038 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10039 (void *)gCamCapability[cameraId]->forward_matrix1,
10040 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10041
10042 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10043 (void *)gCamCapability[cameraId]->forward_matrix2,
10044 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10045
10046 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10047 (void *)gCamCapability[cameraId]->color_transform1,
10048 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10049
10050 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10051 (void *)gCamCapability[cameraId]->color_transform2,
10052 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10053
10054 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10055 (void *)gCamCapability[cameraId]->calibration_transform1,
10056 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10057
10058 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10059 (void *)gCamCapability[cameraId]->calibration_transform2,
10060 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10061
10062 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10063 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10064 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10065 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10066 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10067 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10068 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10069 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10070 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10071 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10072 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10073 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10074 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10075 ANDROID_JPEG_GPS_COORDINATES,
10076 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10077 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10078 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10079 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10080 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10081 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10082 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10083 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10084 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10085 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010086#ifndef USE_HAL_3_3
10087 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10088#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010089 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010090 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010091 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10092 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010093 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010094 /* DevCamDebug metadata request_keys_basic */
10095 DEVCAMDEBUG_META_ENABLE,
10096 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010097 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010098 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010099 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010100 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010101 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010102 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010103
10104 size_t request_keys_cnt =
10105 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10106 Vector<int32_t> available_request_keys;
10107 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10108 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10109 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10110 }
10111
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010112 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010113 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10114 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10115 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010116 }
10117
Thierry Strudel3d639192016-09-09 11:52:26 -070010118 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10119 available_request_keys.array(), available_request_keys.size());
10120
10121 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10122 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10123 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10124 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10125 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10126 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10127 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10128 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10129 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10130 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10131 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10132 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10133 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10134 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10135 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10136 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10137 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010138 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010139 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10140 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10141 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010142 ANDROID_STATISTICS_FACE_SCORES,
10143#ifndef USE_HAL_3_3
10144 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10145#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010146 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010147 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010148 // DevCamDebug metadata result_keys_basic
10149 DEVCAMDEBUG_META_ENABLE,
10150 // DevCamDebug metadata result_keys AF
10151 DEVCAMDEBUG_AF_LENS_POSITION,
10152 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10153 DEVCAMDEBUG_AF_TOF_DISTANCE,
10154 DEVCAMDEBUG_AF_LUMA,
10155 DEVCAMDEBUG_AF_HAF_STATE,
10156 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10157 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10158 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10159 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10160 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10161 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10162 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10163 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10164 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10165 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10166 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10167 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10168 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10169 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10170 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10171 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10172 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10173 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10174 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10175 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10176 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10177 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10178 // DevCamDebug metadata result_keys AEC
10179 DEVCAMDEBUG_AEC_TARGET_LUMA,
10180 DEVCAMDEBUG_AEC_COMP_LUMA,
10181 DEVCAMDEBUG_AEC_AVG_LUMA,
10182 DEVCAMDEBUG_AEC_CUR_LUMA,
10183 DEVCAMDEBUG_AEC_LINECOUNT,
10184 DEVCAMDEBUG_AEC_REAL_GAIN,
10185 DEVCAMDEBUG_AEC_EXP_INDEX,
10186 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010187 // DevCamDebug metadata result_keys zzHDR
10188 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10189 DEVCAMDEBUG_AEC_L_LINECOUNT,
10190 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10191 DEVCAMDEBUG_AEC_S_LINECOUNT,
10192 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10193 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10194 // DevCamDebug metadata result_keys ADRC
10195 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10196 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10197 DEVCAMDEBUG_AEC_GTM_RATIO,
10198 DEVCAMDEBUG_AEC_LTM_RATIO,
10199 DEVCAMDEBUG_AEC_LA_RATIO,
10200 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010201 // DevCamDebug metadata result_keys AWB
10202 DEVCAMDEBUG_AWB_R_GAIN,
10203 DEVCAMDEBUG_AWB_G_GAIN,
10204 DEVCAMDEBUG_AWB_B_GAIN,
10205 DEVCAMDEBUG_AWB_CCT,
10206 DEVCAMDEBUG_AWB_DECISION,
10207 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010208 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10209 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10210 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010211 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010212 };
10213
Thierry Strudel3d639192016-09-09 11:52:26 -070010214 size_t result_keys_cnt =
10215 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10216
10217 Vector<int32_t> available_result_keys;
10218 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10219 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10220 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10221 }
10222 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10223 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10224 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10225 }
10226 if (supportedFaceDetectMode == 1) {
10227 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10228 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10229 } else if ((supportedFaceDetectMode == 2) ||
10230 (supportedFaceDetectMode == 3)) {
10231 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10232 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10233 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010234#ifndef USE_HAL_3_3
10235 if (hasBlackRegions) {
10236 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10237 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10238 }
10239#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010240
10241 if (gExposeEnableZslKey) {
10242 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10243 }
10244
Thierry Strudel3d639192016-09-09 11:52:26 -070010245 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10246 available_result_keys.array(), available_result_keys.size());
10247
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010248 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010249 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10250 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10251 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10252 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10253 ANDROID_SCALER_CROPPING_TYPE,
10254 ANDROID_SYNC_MAX_LATENCY,
10255 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10256 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10257 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10258 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10259 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10260 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10261 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10262 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10263 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10264 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10265 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10266 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10267 ANDROID_LENS_FACING,
10268 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10269 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10270 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10271 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10272 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10273 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10274 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10275 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10276 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10277 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10278 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10279 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10280 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10281 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10282 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10283 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10284 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10285 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10286 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10287 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010288 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010289 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10290 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10291 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10292 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10293 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10294 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10295 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10296 ANDROID_CONTROL_AVAILABLE_MODES,
10297 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10298 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10299 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10300 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010301 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10302#ifndef USE_HAL_3_3
10303 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10304 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10305#endif
10306 };
10307
10308 Vector<int32_t> available_characteristics_keys;
10309 available_characteristics_keys.appendArray(characteristics_keys_basic,
10310 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10311#ifndef USE_HAL_3_3
10312 if (hasBlackRegions) {
10313 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10314 }
10315#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010316
10317 if (0 <= indexPD) {
10318 int32_t depthKeys[] = {
10319 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10320 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10321 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10322 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10323 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10324 };
10325 available_characteristics_keys.appendArray(depthKeys,
10326 sizeof(depthKeys) / sizeof(depthKeys[0]));
10327 }
10328
Thierry Strudel3d639192016-09-09 11:52:26 -070010329 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010330 available_characteristics_keys.array(),
10331 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010332
10333 /*available stall durations depend on the hw + sw and will be different for different devices */
10334 /*have to add for raw after implementation*/
10335 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10336 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10337
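    // Each stall-duration entry below is an int64 quadruple of
    // (format, width, height, stall duration in ns), matching the layout of
    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS.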
10338 Vector<int64_t> available_stall_durations;
10339 for (uint32_t j = 0; j < stall_formats_count; j++) {
10340 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10341 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10342 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10343 available_stall_durations.add(stall_formats[j]);
10344 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10345 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10346 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10347 }
10348 } else {
10349 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10350 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10351 available_stall_durations.add(stall_formats[j]);
10352 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10353 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10354 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10355 }
10356 }
10357 }
10358 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10359 available_stall_durations.array(),
10360 available_stall_durations.size());
10361
10362 //QCAMERA3_OPAQUE_RAW
10363 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10364 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
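    // white_level encodes the sensor bit depth, which selects the 8/10/12bpp Bayer
    // format below; opaque_raw_fmt selects QCOM (legacy) vs MIPI packing.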
10365 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10366 case LEGACY_RAW:
10367 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10368 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10369 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10370 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10371 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10372 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10373 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10374 break;
10375 case MIPI_RAW:
10376 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10377 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10378 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10379 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10380 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10381 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10382 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10383 break;
10384 default:
10385 LOGE("unknown opaque_raw_format %d",
10386 gCamCapability[cameraId]->opaque_raw_fmt);
10387 break;
10388 }
10389 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10390
10391 Vector<int32_t> strides;
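    // QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, plane-0 stride) triplets.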
10392 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10393 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10394 cam_stream_buf_plane_info_t buf_planes;
10395 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10396 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10397 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10398 &gCamCapability[cameraId]->padding_info, &buf_planes);
10399 strides.add(buf_planes.plane_info.mp[0].stride);
10400 }
10401 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10402 strides.size());
10403
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010404 //TBD: remove the following line once backend advertises zzHDR in feature mask
10405 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010406 //Video HDR default
10407 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10408 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010409 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010410 int32_t vhdr_mode[] = {
10411 QCAMERA3_VIDEO_HDR_MODE_OFF,
10412 QCAMERA3_VIDEO_HDR_MODE_ON};
10413
10414 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10415 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10416 vhdr_mode, vhdr_mode_count);
10417 }
10418
Thierry Strudel3d639192016-09-09 11:52:26 -070010419 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10420 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10421 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10422
10423 uint8_t isMonoOnly =
10424 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10425 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10426 &isMonoOnly, 1);
10427
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010428#ifndef USE_HAL_3_3
10429 Vector<int32_t> opaque_size;
10430 for (size_t j = 0; j < scalar_formats_count; j++) {
10431 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10432 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10433 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10434 cam_stream_buf_plane_info_t buf_planes;
10435
10436 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10437 &gCamCapability[cameraId]->padding_info, &buf_planes);
10438
10439 if (rc == 0) {
10440 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10441 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10442 opaque_size.add(buf_planes.plane_info.frame_len);
10443 } else {
10444 LOGE("raw frame calculation failed!");
10445 }
10446 }
10447 }
10448 }
10449
10450 if ((opaque_size.size() > 0) &&
10451 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10452 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10453 else
10454 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10455#endif
10456
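    // The loops below translate backend (cam_*) enums into vendor-tag values via
    // lookupFwkName(); entries with no framework mapping are skipped.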
Thierry Strudel04e026f2016-10-10 11:27:36 -070010457 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10458 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10459 size = 0;
10460 count = CAM_IR_MODE_MAX;
10461 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10462 for (size_t i = 0; i < count; i++) {
10463 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10464 gCamCapability[cameraId]->supported_ir_modes[i]);
10465 if (NAME_NOT_FOUND != val) {
10466 avail_ir_modes[size] = (int32_t)val;
10467 size++;
10468 }
10469 }
10470 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10471 avail_ir_modes, size);
10472 }
10473
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010474 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10475 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10476 size = 0;
10477 count = CAM_AEC_CONVERGENCE_MAX;
10478 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10479 for (size_t i = 0; i < count; i++) {
10480 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10481 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10482 if (NAME_NOT_FOUND != val) {
10483 available_instant_aec_modes[size] = (int32_t)val;
10484 size++;
10485 }
10486 }
10487 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10488 available_instant_aec_modes, size);
10489 }
10490
Thierry Strudel54dc9782017-02-15 12:12:10 -080010491 int32_t sharpness_range[] = {
10492 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10493 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10494 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10495
10496 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10497 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10498 size = 0;
10499 count = CAM_BINNING_CORRECTION_MODE_MAX;
10500 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10501 for (size_t i = 0; i < count; i++) {
10502 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10503 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10504 gCamCapability[cameraId]->supported_binning_modes[i]);
10505 if (NAME_NOT_FOUND != val) {
10506 avail_binning_modes[size] = (int32_t)val;
10507 size++;
10508 }
10509 }
10510 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10511 avail_binning_modes, size);
10512 }
10513
10514 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10515 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10516 size = 0;
10517 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10518 for (size_t i = 0; i < count; i++) {
10519 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10520 gCamCapability[cameraId]->supported_aec_modes[i]);
10521 if (NAME_NOT_FOUND != val)
10522 available_aec_modes[size++] = val;
10523 }
10524 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10525 available_aec_modes, size);
10526 }
10527
10528 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10529 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10530 size = 0;
10531 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10532 for (size_t i = 0; i < count; i++) {
10533 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10534 gCamCapability[cameraId]->supported_iso_modes[i]);
10535 if (NAME_NOT_FOUND != val)
10536 available_iso_modes[size++] = val;
10537 }
10538 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10539 available_iso_modes, size);
10540 }
10541
10542 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010543 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010544 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10545 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10546 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10547
10548 int32_t available_saturation_range[4];
10549 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10550 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10551 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10552 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10553 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10554 available_saturation_range, 4);
10555
10556 uint8_t is_hdr_values[2];
10557 is_hdr_values[0] = 0;
10558 is_hdr_values[1] = 1;
10559 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10560 is_hdr_values, 2);
10561
10562 float is_hdr_confidence_range[2];
10563 is_hdr_confidence_range[0] = 0.0;
10564 is_hdr_confidence_range[1] = 1.0;
10565 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10566 is_hdr_confidence_range, 2);
10567
Emilian Peev0a972ef2017-03-16 10:25:53 +000010568 size_t eepromLength = strnlen(
10569 reinterpret_cast<const char *>(
10570 gCamCapability[cameraId]->eeprom_version_info),
10571 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10572 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010573 char easelInfo[] = ",E:N";
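        // Append ",E:Y"/",E:N" to the EEPROM version string to record whether Easel
        // is present on the device, provided the result still fits the field.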
10574 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10575 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10576 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010577 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10578 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010579 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010580 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010581 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10582 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10583 }
10584
Thierry Strudel3d639192016-09-09 11:52:26 -070010585 gStaticMetadata[cameraId] = staticInfo.release();
10586 return rc;
10587}
10588
10589/*===========================================================================
10590 * FUNCTION : makeTable
10591 *
10592 * DESCRIPTION: make a table of sizes
10593 *
10594 * PARAMETERS :
10595 *   @dimTable/@size : source dimension array and its number of valid entries
10596 *   @max_size/@sizeTable : output capacity and flattened (width, height) array
10597 *==========================================================================*/
10598void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10599 size_t max_size, int32_t *sizeTable)
10600{
10601 size_t j = 0;
10602 if (size > max_size) {
10603 size = max_size;
10604 }
10605 for (size_t i = 0; i < size; i++) {
10606 sizeTable[j] = dimTable[i].width;
10607 sizeTable[j+1] = dimTable[i].height;
10608 j+=2;
10609 }
10610}
10611
10612/*===========================================================================
10613 * FUNCTION : makeFPSTable
10614 *
10615 * DESCRIPTION: make a table of fps ranges
10616 *
10617 * PARAMETERS :
10618 *   @fpsTable/@size : source fps ranges and count; @max_size/@fpsRangesTable : output capacity and flattened (min, max) array
10619 *==========================================================================*/
10620void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10621 size_t max_size, int32_t *fpsRangesTable)
10622{
10623 size_t j = 0;
10624 if (size > max_size) {
10625 size = max_size;
10626 }
10627 for (size_t i = 0; i < size; i++) {
10628 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10629 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10630 j+=2;
10631 }
10632}
10633
10634/*===========================================================================
10635 * FUNCTION : makeOverridesList
10636 *
10637 * DESCRIPTION: make a list of scene mode overrides
10638 *
10639 * PARAMETERS :
10640 *   @overridesTable/@size/@max_size : per-scene-mode overrides from the backend and their counts
10641 *   @overridesList/@supported_indexes/@camera_id : output (ae, awb, af) triplets, framework scene-mode indexes, camera id
10642 *==========================================================================*/
10643void QCamera3HardwareInterface::makeOverridesList(
10644 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10645 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10646{
10647 /* The daemon provides overrides for all scene modes; however, we should send
10648 the framework only the overrides for the scene modes that the framework
10649 itself supports. */
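    /* Output layout: one (ae_mode, awb_mode, af_mode) triplet per supported scene
       mode, as expected by ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */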
10650 size_t j = 0;
10651 if (size > max_size) {
10652 size = max_size;
10653 }
10654 size_t focus_count = CAM_FOCUS_MODE_MAX;
10655 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10656 focus_count);
10657 for (size_t i = 0; i < size; i++) {
10658 bool supt = false;
10659 size_t index = supported_indexes[i];
10660 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10661 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10662 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10663 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10664 overridesTable[index].awb_mode);
10665 if (NAME_NOT_FOUND != val) {
10666 overridesList[j+1] = (uint8_t)val;
10667 }
10668 uint8_t focus_override = overridesTable[index].af_mode;
10669 for (size_t k = 0; k < focus_count; k++) {
10670 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10671 supt = true;
10672 break;
10673 }
10674 }
10675 if (supt) {
10676 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10677 focus_override);
10678 if (NAME_NOT_FOUND != val) {
10679 overridesList[j+2] = (uint8_t)val;
10680 }
10681 } else {
10682 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10683 }
10684 j+=3;
10685 }
10686}
10687
10688/*===========================================================================
10689 * FUNCTION : filterJpegSizes
10690 *
10691 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10692 * are no smaller than the active array dimensions divided by the downscale factor
10693 *
10694 * PARAMETERS :
10695 *
10696 * RETURN : length of jpegSizes array
10697 *==========================================================================*/
10698
10699size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10700 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10701 uint8_t downscale_factor)
10702{
10703 if (0 == downscale_factor) {
10704 downscale_factor = 1;
10705 }
10706
10707 int32_t min_width = active_array_size.width / downscale_factor;
10708 int32_t min_height = active_array_size.height / downscale_factor;
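    // Keep only processed (width, height) pairs that are at least
    // active_array / downscale_factor in both dimensions.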
10709 size_t jpegSizesCnt = 0;
10710 if (processedSizesCnt > maxCount) {
10711 processedSizesCnt = maxCount;
10712 }
10713 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10714 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10715 jpegSizes[jpegSizesCnt] = processedSizes[i];
10716 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10717 jpegSizesCnt += 2;
10718 }
10719 }
10720 return jpegSizesCnt;
10721}
10722
10723/*===========================================================================
10724 * FUNCTION : computeNoiseModelEntryS
10725 *
10726 * DESCRIPTION: function to map a given sensitivity to the S noise
10727 * model parameters in the DNG noise model.
10728 *
10729 * PARAMETERS : sens : the sensor sensitivity
10730 *
10731 * RETURN : S (sensor amplification) noise
10732 *
10733 *==========================================================================*/
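// Per the ANDROID_SENSOR_NOISE_PROFILE definition, pixel noise is modeled as
// N(x) = sqrt(S*x + O); S below scales linearly with sensitivity.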
10734double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10735 double s = gCamCapability[mCameraId]->gradient_S * sens +
10736 gCamCapability[mCameraId]->offset_S;
10737 return ((s < 0.0) ? 0.0 : s);
10738}
10739
10740/*===========================================================================
10741 * FUNCTION : computeNoiseModelEntryO
10742 *
10743 * DESCRIPTION: function to map a given sensitivity to the O noise
10744 * model parameters in the DNG noise model.
10745 *
10746 * PARAMETERS : sens : the sensor sensitivity
10747 *
10748 * RETURN : O (sensor readout) noise
10749 *
10750 *==========================================================================*/
10751double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10752 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10753 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10754 1.0 : (1.0 * sens / max_analog_sens);
10755 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10756 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10757 return ((o < 0.0) ? 0.0 : o);
10758}
10759
10760/*===========================================================================
10761 * FUNCTION : getSensorSensitivity
10762 *
10763 * DESCRIPTION: convert iso_mode to an integer value
10764 *
10765 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10766 *
10767 * RETURN : sensitivity supported by sensor
10768 *
10769 *==========================================================================*/
10770int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10771{
10772 int32_t sensitivity;
10773
10774 switch (iso_mode) {
10775 case CAM_ISO_MODE_100:
10776 sensitivity = 100;
10777 break;
10778 case CAM_ISO_MODE_200:
10779 sensitivity = 200;
10780 break;
10781 case CAM_ISO_MODE_400:
10782 sensitivity = 400;
10783 break;
10784 case CAM_ISO_MODE_800:
10785 sensitivity = 800;
10786 break;
10787 case CAM_ISO_MODE_1600:
10788 sensitivity = 1600;
10789 break;
10790 default:
10791 sensitivity = -1;
10792 break;
10793 }
10794 return sensitivity;
10795}
10796
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010797int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010798 if (gEaselManagerClient == nullptr) {
10799 gEaselManagerClient = EaselManagerClient::create();
10800 if (gEaselManagerClient == nullptr) {
10801 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10802 return -ENODEV;
10803 }
10804 }
10805
10806 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010807 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10808 // to connect to Easel.
10809 bool doNotpowerOnEasel =
10810 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10811
10812 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010813 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10814 return OK;
10815 }
10816
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010817 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010818 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010819 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010820 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010821 return res;
10822 }
10823
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010824 EaselManagerClientOpened = true;
10825
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010826 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010827 if (res != OK) {
10828 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10829 }
10830
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010831 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010832 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010833
10834 // Expose enableZsl key only when HDR+ mode is enabled.
10835 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010836 }
10837
10838 return OK;
10839}
10840
Thierry Strudel3d639192016-09-09 11:52:26 -070010841/*===========================================================================
10842 * FUNCTION : getCamInfo
10843 *
10844 * DESCRIPTION: query camera capabilities
10845 *
10846 * PARAMETERS :
10847 * @cameraId : camera Id
10848 * @info : camera info struct to be filled in with camera capabilities
10849 *
10850 * RETURN : int type of status
10851 * NO_ERROR -- success
10852 * non-zero failure code
10853 *==========================================================================*/
10854int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10855 struct camera_info *info)
10856{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010857 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010858 int rc = 0;
10859
10860 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010861
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010862 {
10863 Mutex::Autolock l(gHdrPlusClientLock);
10864 rc = initHdrPlusClientLocked();
10865 if (rc != OK) {
10866 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10867 pthread_mutex_unlock(&gCamLock);
10868 return rc;
10869 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010870 }
10871
Thierry Strudel3d639192016-09-09 11:52:26 -070010872 if (NULL == gCamCapability[cameraId]) {
10873 rc = initCapabilities(cameraId);
10874 if (rc < 0) {
10875 pthread_mutex_unlock(&gCamLock);
10876 return rc;
10877 }
10878 }
10879
10880 if (NULL == gStaticMetadata[cameraId]) {
10881 rc = initStaticMetadata(cameraId);
10882 if (rc < 0) {
10883 pthread_mutex_unlock(&gCamLock);
10884 return rc;
10885 }
10886 }
10887
10888 switch(gCamCapability[cameraId]->position) {
10889 case CAM_POSITION_BACK:
10890 case CAM_POSITION_BACK_AUX:
10891 info->facing = CAMERA_FACING_BACK;
10892 break;
10893
10894 case CAM_POSITION_FRONT:
10895 case CAM_POSITION_FRONT_AUX:
10896 info->facing = CAMERA_FACING_FRONT;
10897 break;
10898
10899 default:
10900 LOGE("Unknown position type %d for camera id:%d",
10901 gCamCapability[cameraId]->position, cameraId);
10902 rc = -1;
10903 break;
10904 }
10905
10906
10907 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010908#ifndef USE_HAL_3_3
10909 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10910#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010911 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010912#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010913 info->static_camera_characteristics = gStaticMetadata[cameraId];
10914
10915 //For now assume both cameras can operate independently.
10916 info->conflicting_devices = NULL;
10917 info->conflicting_devices_length = 0;
10918
10919 //resource cost is 100 * MIN(1.0, m/M),
10920 //where m is throughput requirement with maximum stream configuration
10921 //and M is CPP maximum throughput.
10922 float max_fps = 0.0;
10923 for (uint32_t i = 0;
10924 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10925 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10926 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10927 }
10928 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10929 gCamCapability[cameraId]->active_array_size.width *
10930 gCamCapability[cameraId]->active_array_size.height * max_fps /
10931 gCamCapability[cameraId]->max_pixel_bandwidth;
10932 info->resource_cost = 100 * MIN(1.0, ratio);
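    // For example, if the worst-case pixel throughput m is half of the CPP maximum M,
    // the reported resource cost is 50.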
10933 LOGI("camera %d resource cost is %d", cameraId,
10934 info->resource_cost);
10935
10936 pthread_mutex_unlock(&gCamLock);
10937 return rc;
10938}
10939
10940/*===========================================================================
10941 * FUNCTION : translateCapabilityToMetadata
10942 *
10943 * DESCRIPTION: translate the capability into camera_metadata_t
10944 *
10945 * PARAMETERS : type of the request
10946 *
10947 *
10948 * RETURN : success: camera_metadata_t*
10949 * failure: NULL
10950 *
10951 *==========================================================================*/
10952camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10953{
10954 if (mDefaultMetadata[type] != NULL) {
10955 return mDefaultMetadata[type];
10956 }
10957 //first time we are handling this request
10958 //fill up the metadata structure using the wrapper class
10959 CameraMetadata settings;
10960 //translate from cam_capability_t to camera_metadata_tag_t
10961 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10962 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10963 int32_t defaultRequestID = 0;
10964 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10965
10966 /* OIS disable */
10967 char ois_prop[PROPERTY_VALUE_MAX];
10968 memset(ois_prop, 0, sizeof(ois_prop));
10969 property_get("persist.camera.ois.disable", ois_prop, "0");
10970 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10971
10972 /* Force video to use OIS */
10973 char videoOisProp[PROPERTY_VALUE_MAX];
10974 memset(videoOisProp, 0, sizeof(videoOisProp));
10975 property_get("persist.camera.ois.video", videoOisProp, "1");
10976 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010977
10978 // Hybrid AE enable/disable
10979 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10980 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10981 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10982 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10983
Thierry Strudel3d639192016-09-09 11:52:26 -070010984 uint8_t controlIntent = 0;
10985 uint8_t focusMode;
10986 uint8_t vsMode;
10987 uint8_t optStabMode;
10988 uint8_t cacMode;
10989 uint8_t edge_mode;
10990 uint8_t noise_red_mode;
10991 uint8_t tonemap_mode;
10992 bool highQualityModeEntryAvailable = FALSE;
10993 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010994 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010995 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10996 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010997 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010998 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010999 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011000
Thierry Strudel3d639192016-09-09 11:52:26 -070011001 switch (type) {
11002 case CAMERA3_TEMPLATE_PREVIEW:
11003 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11004 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11005 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11006 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11007 edge_mode = ANDROID_EDGE_MODE_FAST;
11008 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11009 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11010 break;
11011 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11012 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11013 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11014 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11015 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11016 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11017 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11018 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11019 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11020 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11021 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11022 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11023 highQualityModeEntryAvailable = TRUE;
11024 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11025 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11026 fastModeEntryAvailable = TRUE;
11027 }
11028 }
11029 if (highQualityModeEntryAvailable) {
11030 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11031 } else if (fastModeEntryAvailable) {
11032 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11033 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011034 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11035 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11036 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011037 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011038 break;
11039 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11040 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11041 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11042 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011043 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11044 edge_mode = ANDROID_EDGE_MODE_FAST;
11045 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11046 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11047 if (forceVideoOis)
11048 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11049 break;
11050 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11051 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11052 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11053 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011054 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11055 edge_mode = ANDROID_EDGE_MODE_FAST;
11056 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11057 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11058 if (forceVideoOis)
11059 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11060 break;
11061 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11062 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11063 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11064 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11065 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11066 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11067 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11068 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11069 break;
11070 case CAMERA3_TEMPLATE_MANUAL:
11071 edge_mode = ANDROID_EDGE_MODE_FAST;
11072 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11073 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11074 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11075 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11076 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11077 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11078 break;
11079 default:
11080 edge_mode = ANDROID_EDGE_MODE_FAST;
11081 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11082 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11083 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11084 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11085 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11086 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11087 break;
11088 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011089 // Set CAC to OFF if the underlying device doesn't support it
11090 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11091 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11092 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11094 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11095 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11096 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11097 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11098 }
11099 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011100 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011101 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011102
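    // Default OIS: force ON when the only advertised optical stabilization mode is ON;
    // force OFF when the only mode is OFF or when OIS is disabled via the
    // persist.camera.ois.disable property.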
11103 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11104 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11105 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11106 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11107 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11108 || ois_disable)
11109 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11110 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011111 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011112
11113 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11114 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11115
11116 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11117 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11118
11119 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11120 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11121
11122 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11123 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11124
11125 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11126 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11127
11128 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11129 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11130
11131 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11132 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11133
11134 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11135 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11136
11137 /*flash*/
11138 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11139 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11140
11141 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11142 settings.update(ANDROID_FLASH_FIRING_POWER,
11143 &flashFiringLevel, 1);
11144
11145 /* lens */
11146 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11147 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11148
11149 if (gCamCapability[mCameraId]->filter_densities_count) {
11150 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11151 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11152 gCamCapability[mCameraId]->filter_densities_count);
11153 }
11154
11155 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11156 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11157
Thierry Strudel3d639192016-09-09 11:52:26 -070011158 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11159 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11160
11161 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11162 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11163
11164 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11165 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11166
11167 /* face detection (default to OFF) */
11168 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11169 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11170
Thierry Strudel54dc9782017-02-15 12:12:10 -080011171 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11172 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011173
11174 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11175 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11176
11177 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11178 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11179
Thierry Strudel3d639192016-09-09 11:52:26 -070011180
11181 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11182 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11183
11184 /* Exposure time(Update the Min Exposure Time)*/
11185 /* Exposure time (default to the minimum supported exposure time) */
11186 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11187
11188 /* frame duration */
11189 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11190 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11191
11192 /* sensitivity */
11193 static const int32_t default_sensitivity = 100;
11194 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011195#ifndef USE_HAL_3_3
11196 static const int32_t default_isp_sensitivity =
11197 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11198 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11199#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011200
11201 /*edge mode*/
11202 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11203
11204 /*noise reduction mode*/
11205 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11206
11207 /*color correction mode*/
11208 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11209 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11210
11211 /* tonemap mode */
11212 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11213
11214 int32_t scaler_crop_region[4];
11215 scaler_crop_region[0] = 0;
11216 scaler_crop_region[1] = 0;
11217 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11218 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11219 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11220
11221 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11222 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11223
11224 /*focus distance*/
11225 float focus_distance = 0.0;
11226 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11227
11228 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011229 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011230 float max_range = 0.0;
11231 float max_fixed_fps = 0.0;
11232 int32_t fps_range[2] = {0, 0};
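    // Pick the widest supported range for preview/still/ZSL templates and the
    // highest fixed-fps range for the rest, skipping ranges whose max_fps exceeds
    // TEMPLATE_MAX_PREVIEW_FPS.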
11233 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11234 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011235 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11236 TEMPLATE_MAX_PREVIEW_FPS) {
11237 continue;
11238 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011239 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11240 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11241 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11242 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11243 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11244 if (range > max_range) {
11245 fps_range[0] =
11246 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11247 fps_range[1] =
11248 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11249 max_range = range;
11250 }
11251 } else {
11252 if (range < 0.01 && max_fixed_fps <
11253 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11254 fps_range[0] =
11255 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11256 fps_range[1] =
11257 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11258 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11259 }
11260 }
11261 }
11262 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11263
11264 /*precapture trigger*/
11265 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11266 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11267
11268 /*af trigger*/
11269 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11270 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11271
11272 /* ae & af regions */
11273 int32_t active_region[] = {
11274 gCamCapability[mCameraId]->active_array_size.left,
11275 gCamCapability[mCameraId]->active_array_size.top,
11276 gCamCapability[mCameraId]->active_array_size.left +
11277 gCamCapability[mCameraId]->active_array_size.width,
11278 gCamCapability[mCameraId]->active_array_size.top +
11279 gCamCapability[mCameraId]->active_array_size.height,
11280 0};
11281 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11282 sizeof(active_region) / sizeof(active_region[0]));
11283 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11284 sizeof(active_region) / sizeof(active_region[0]));
11285
11286 /* black level lock */
11287 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11288 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11289
Thierry Strudel3d639192016-09-09 11:52:26 -070011290 //special defaults for manual template
11291 if (type == CAMERA3_TEMPLATE_MANUAL) {
11292 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11293 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11294
11295 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11296 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11297
11298 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11299 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11300
11301 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11302 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11303
11304 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11305 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11306
11307 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11308 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11309 }
11310
11311
11312 /* TNR
11313 * This is where we decide for which templates TNR is enabled by default.
11314 * TNR is enabled if either the preview or the video stream requires it.
11315 * This is not to be confused with per-stream linking; that decision is still
11316 * made per session and is handled as part of stream configuration.
11317 */
11318 uint8_t tnr_enable = 0;
11319
11320 if (m_bTnrPreview || m_bTnrVideo) {
11321
11322 switch (type) {
11323 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11324 tnr_enable = 1;
11325 break;
11326
11327 default:
11328 tnr_enable = 0;
11329 break;
11330 }
11331
11332 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11333 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11334 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11335
11336 LOGD("TNR:%d with process plate %d for template:%d",
11337 tnr_enable, tnr_process_type, type);
11338 }
11339
11340 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011341 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011342 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11343
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011344 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011345 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11346
Shuzhen Wang920ea402017-05-03 08:49:39 -070011347 uint8_t related_camera_id = mCameraId;
11348 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011349
11350 /* CDS default */
11351 char prop[PROPERTY_VALUE_MAX];
11352 memset(prop, 0, sizeof(prop));
11353 property_get("persist.camera.CDS", prop, "Auto");
11354 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11355 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11356 if (CAM_CDS_MODE_MAX == cds_mode) {
11357 cds_mode = CAM_CDS_MODE_AUTO;
11358 }
11359
11360 /* Disabling CDS in templates which have TNR enabled*/
11361 if (tnr_enable)
11362 cds_mode = CAM_CDS_MODE_OFF;
11363
11364 int32_t mode = cds_mode;
11365 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011366
Thierry Strudel269c81a2016-10-12 12:13:59 -070011367 /* Manual Convergence AEC Speed is disabled by default*/
11368 float default_aec_speed = 0;
11369 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11370
11371 /* Manual Convergence AWB Speed is disabled by default*/
11372 float default_awb_speed = 0;
11373 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11374
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011375 // Set instant AEC to normal convergence by default
11376 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11377 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11378
Shuzhen Wang19463d72016-03-08 11:09:52 -080011379 /* hybrid ae */
11380 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11381
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011382 if (gExposeEnableZslKey) {
11383 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11384 }
11385
Thierry Strudel3d639192016-09-09 11:52:26 -070011386 mDefaultMetadata[type] = settings.release();
11387
11388 return mDefaultMetadata[type];
11389}
11390
11391/*===========================================================================
11392 * FUNCTION : setFrameParameters
11393 *
11394 * DESCRIPTION: set parameters per frame as requested in the metadata from
11395 * framework
11396 *
11397 * PARAMETERS :
11398 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011399 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011400 * @blob_request: Whether this request is a blob request or not
11401 *
11402 * RETURN : success: NO_ERROR
11403 * failure:
11404 *==========================================================================*/
11405int QCamera3HardwareInterface::setFrameParameters(
11406 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011407 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011408 int blob_request,
11409 uint32_t snapshotStreamId)
11410{
11411 /*translate from camera_metadata_t type to parm_type_t*/
11412 int rc = 0;
11413 int32_t hal_version = CAM_HAL_V3;
11414
11415 clear_metadata_buffer(mParameters);
11416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11417 LOGE("Failed to set hal version in the parameters");
11418 return BAD_VALUE;
11419 }
11420
11421 /*we need to update the frame number in the parameters*/
11422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11423 request->frame_number)) {
11424 LOGE("Failed to set the frame number in the parameters");
11425 return BAD_VALUE;
11426 }
11427
11428 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011430 LOGE("Failed to set stream type mask in the parameters");
11431 return BAD_VALUE;
11432 }
11433
11434 if (mUpdateDebugLevel) {
11435 uint32_t dummyDebugLevel = 0;
11436 /* The value of dummyDebugLevel is irrelevant. Setting
11437 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property */
11438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11439 dummyDebugLevel)) {
11440 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11441 return BAD_VALUE;
11442 }
11443 mUpdateDebugLevel = false;
11444 }
11445
11446 if(request->settings != NULL){
11447 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11448 if (blob_request)
11449 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11450 }
11451
11452 return rc;
11453}
11454
11455/*===========================================================================
11456 * FUNCTION : setReprocParameters
11457 *
11458 * DESCRIPTION: Translate framework metadata into the HAL reprocess metadata
11459 * structure.
11460 *
11461 * PARAMETERS :
11462 * @request : request that needs to be serviced
11463 *
11464 * RETURN : success: NO_ERROR
11465 * failure:
11466 *==========================================================================*/
11467int32_t QCamera3HardwareInterface::setReprocParameters(
11468 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11469 uint32_t snapshotStreamId)
11470{
11471 /*translate from camera_metadata_t type to parm_type_t*/
11472 int rc = 0;
11473
11474 if (NULL == request->settings){
11475 LOGE("Reprocess settings cannot be NULL");
11476 return BAD_VALUE;
11477 }
11478
11479 if (NULL == reprocParam) {
11480 LOGE("Invalid reprocessing metadata buffer");
11481 return BAD_VALUE;
11482 }
11483 clear_metadata_buffer(reprocParam);
11484
11485 /*we need to update the frame number in the parameters*/
11486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11487 request->frame_number)) {
11488 LOGE("Failed to set the frame number in the parameters");
11489 return BAD_VALUE;
11490 }
11491
11492 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11493 if (rc < 0) {
11494 LOGE("Failed to translate reproc request");
11495 return rc;
11496 }
11497
11498 CameraMetadata frame_settings;
11499 frame_settings = request->settings;
11500 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11501 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11502 int32_t *crop_count =
11503 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11504 int32_t *crop_data =
11505 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11506 int32_t *roi_map =
11507 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11508 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11509 cam_crop_data_t crop_meta;
11510 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11511 crop_meta.num_of_streams = 1;
11512 crop_meta.crop_info[0].crop.left = crop_data[0];
11513 crop_meta.crop_info[0].crop.top = crop_data[1];
11514 crop_meta.crop_info[0].crop.width = crop_data[2];
11515 crop_meta.crop_info[0].crop.height = crop_data[3];
11516
11517 crop_meta.crop_info[0].roi_map.left =
11518 roi_map[0];
11519 crop_meta.crop_info[0].roi_map.top =
11520 roi_map[1];
11521 crop_meta.crop_info[0].roi_map.width =
11522 roi_map[2];
11523 crop_meta.crop_info[0].roi_map.height =
11524 roi_map[3];
11525
11526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11527 rc = BAD_VALUE;
11528 }
11529 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11530 request->input_buffer->stream,
11531 crop_meta.crop_info[0].crop.left,
11532 crop_meta.crop_info[0].crop.top,
11533 crop_meta.crop_info[0].crop.width,
11534 crop_meta.crop_info[0].crop.height);
11535 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11536 request->input_buffer->stream,
11537 crop_meta.crop_info[0].roi_map.left,
11538 crop_meta.crop_info[0].roi_map.top,
11539 crop_meta.crop_info[0].roi_map.width,
11540 crop_meta.crop_info[0].roi_map.height);
11541 } else {
11542 LOGE("Invalid reprocess crop count %d!", *crop_count);
11543 }
11544 } else {
11545 LOGE("No crop data from matching output stream");
11546 }
11547
11548 /* These settings are not needed for regular requests so handle them specially for
11549 reprocess requests; information needed for EXIF tags */
11550 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11551 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11552 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11553 if (NAME_NOT_FOUND != val) {
11554 uint32_t flashMode = (uint32_t)val;
11555 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11556 rc = BAD_VALUE;
11557 }
11558 } else {
11559 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11560 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11561 }
11562 } else {
11563 LOGH("No flash mode in reprocess settings");
11564 }
11565
11566 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11567 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11569 rc = BAD_VALUE;
11570 }
11571 } else {
11572 LOGH("No flash state in reprocess settings");
11573 }
11574
11575 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11576 uint8_t *reprocessFlags =
11577 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11578 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11579 *reprocessFlags)) {
11580 rc = BAD_VALUE;
11581 }
11582 }
11583
11584    // Add exif debug data to internal metadata
11585 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11586 mm_jpeg_debug_exif_params_t *debug_params =
11587 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11588 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11589 // AE
11590 if (debug_params->ae_debug_params_valid == TRUE) {
11591 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11592 debug_params->ae_debug_params);
11593 }
11594 // AWB
11595 if (debug_params->awb_debug_params_valid == TRUE) {
11596 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11597 debug_params->awb_debug_params);
11598 }
11599 // AF
11600 if (debug_params->af_debug_params_valid == TRUE) {
11601 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11602 debug_params->af_debug_params);
11603 }
11604 // ASD
11605 if (debug_params->asd_debug_params_valid == TRUE) {
11606 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11607 debug_params->asd_debug_params);
11608 }
11609 // Stats
11610 if (debug_params->stats_debug_params_valid == TRUE) {
11611 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11612 debug_params->stats_debug_params);
11613 }
11614 // BE Stats
11615 if (debug_params->bestats_debug_params_valid == TRUE) {
11616 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11617 debug_params->bestats_debug_params);
11618 }
11619 // BHIST
11620 if (debug_params->bhist_debug_params_valid == TRUE) {
11621 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11622 debug_params->bhist_debug_params);
11623 }
11624 // 3A Tuning
11625 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11626 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11627 debug_params->q3a_tuning_debug_params);
11628 }
11629 }
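    // The 3A debug blobs above are opaque to the HAL; they are presumably embedded
    // later by the JPEG encoder as vendor EXIF debug data (mm_jpeg_debug_exif_params_t).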
11630
11631    // Add metadata which reprocess needs
11632 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11633 cam_reprocess_info_t *repro_info =
11634 (cam_reprocess_info_t *)frame_settings.find
11635 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
11636        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
11637                repro_info->sensor_crop_info);
11638        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
11639                repro_info->camif_crop_info);
11640        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
11641                repro_info->isp_crop_info);
11642        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
11643                repro_info->cpp_crop_info);
11644        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
11645                repro_info->af_focal_length_ratio);
11646        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
11647                repro_info->pipeline_flip);
11648 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11649 repro_info->af_roi);
11650 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11651 repro_info->dyn_mask);
11652        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11653           CAM_INTF_PARM_ROTATION has already been added by
11654           translateToHalMetadata, and the HAL needs to keep that new rotation
11655           metadata. Otherwise, the old rotation info saved in the vendor tag
11656           is used. */
11657 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11658 CAM_INTF_PARM_ROTATION, reprocParam) {
11659 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11660 } else {
11661 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
11662                    repro_info->rotation_info);
11663        }
11664    }
11665
11666    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11667       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11668       roi.width and roi.height become the final JPEG size.
11669       For now, the HAL only checks this for reprocess requests. */
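    /* Illustrative example (not from the original source): with
       QCAMERA3_JPEG_ENCODE_CROP_RECT = {0, 0, 2000, 1500} and
       QCAMERA3_JPEG_ENCODE_CROP_ROI  = {0, 0, 1000, 750}, the 2000x1500 region is
       encoded and scaled so the final JPEG is 1000x750. */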
11670 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11671 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11672 uint8_t *enable =
11673 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11674 if (*enable == TRUE) {
11675 int32_t *crop_data =
11676 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11677 cam_stream_crop_info_t crop_meta;
11678 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11679 crop_meta.stream_id = 0;
11680 crop_meta.crop.left = crop_data[0];
11681 crop_meta.crop.top = crop_data[1];
11682 crop_meta.crop.width = crop_data[2];
11683 crop_meta.crop.height = crop_data[3];
11684            // The JPEG crop roi should match cpp output size
11685 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11686 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11687 crop_meta.roi_map.left = 0;
11688 crop_meta.roi_map.top = 0;
11689 crop_meta.roi_map.width = cpp_crop->crop.width;
11690 crop_meta.roi_map.height = cpp_crop->crop.height;
11691            }
11692 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11693 crop_meta);
11694            LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
11695                crop_meta.crop.left, crop_meta.crop.top,
11696                crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11697            LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
11698                crop_meta.roi_map.left, crop_meta.roi_map.top,
11699                crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11700
11701 // Add JPEG scale information
11702 cam_dimension_t scale_dim;
11703 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11704 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11705 int32_t *roi =
11706 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11707 scale_dim.width = roi[2];
11708 scale_dim.height = roi[3];
11709 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11710 scale_dim);
11711 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11712 scale_dim.width, scale_dim.height, mCameraId);
11713 }
11714        }
11715 }
11716
11717 return rc;
11718}
11719
11720/*===========================================================================
11721 * FUNCTION : saveRequestSettings
11722 *
11723 * DESCRIPTION: Add any settings that might have changed to the request settings
11724 * and save the settings to be applied on the frame
11725 *
11726 * PARAMETERS :
11727 * @jpegMetadata : the extracted and/or modified jpeg metadata
11728 * @request : request with initial settings
11729 *
11730 * RETURN :
11731 * camera_metadata_t* : pointer to the saved request settings
11732 *==========================================================================*/
11733camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11734 const CameraMetadata &jpegMetadata,
11735 camera3_capture_request_t *request)
11736{
11737 camera_metadata_t *resultMetadata;
11738 CameraMetadata camMetadata;
11739 camMetadata = request->settings;
11740
11741 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11742 int32_t thumbnail_size[2];
11743 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11744 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11745 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11746 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11747 }
11748
11749 if (request->input_buffer != NULL) {
11750 uint8_t reprocessFlags = 1;
11751 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11752 (uint8_t*)&reprocessFlags,
11753 sizeof(reprocessFlags));
11754 }
11755
11756 resultMetadata = camMetadata.release();
11757 return resultMetadata;
11758}
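// Note: CameraMetadata::release() transfers ownership of the returned
// camera_metadata_t to the caller, which is expected to free it (e.g. with
// free_camera_metadata()) once it is no longer needed.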
11759
11760/*===========================================================================
11761 * FUNCTION : setHalFpsRange
11762 *
11763 * DESCRIPTION: set FPS range parameter
11764 *
11765 *
11766 * PARAMETERS :
11767 * @settings : Metadata from framework
11768 * @hal_metadata: Metadata buffer
11769 *
11770 *
11771 * RETURN : success: NO_ERROR
11772 *              failure: BAD_VALUE
11773 *==========================================================================*/
11774int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11775 metadata_buffer_t *hal_metadata)
11776{
11777 int32_t rc = NO_ERROR;
11778 cam_fps_range_t fps_range;
11779 fps_range.min_fps = (float)
11780 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11781 fps_range.max_fps = (float)
11782 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11783 fps_range.video_min_fps = fps_range.min_fps;
11784 fps_range.video_max_fps = fps_range.max_fps;
11785
11786 LOGD("aeTargetFpsRange fps: [%f %f]",
11787 fps_range.min_fps, fps_range.max_fps);
11788 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11789 * follows:
11790 * ---------------------------------------------------------------|
11791 * Video stream is absent in configure_streams |
11792 * (Camcorder preview before the first video record |
11793 * ---------------------------------------------------------------|
11794 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11795 * | | | vid_min/max_fps|
11796 * ---------------------------------------------------------------|
11797 * NO | [ 30, 240] | 240 | [240, 240] |
11798 * |-------------|-------------|----------------|
11799 * | [240, 240] | 240 | [240, 240] |
11800 * ---------------------------------------------------------------|
11801 * Video stream is present in configure_streams |
11802 * ---------------------------------------------------------------|
11803 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11804 * | | | vid_min/max_fps|
11805 * ---------------------------------------------------------------|
11806 * NO | [ 30, 240] | 240 | [240, 240] |
11807 * (camcorder prev |-------------|-------------|----------------|
11808 * after video rec | [240, 240] | 240 | [240, 240] |
11809 * is stopped) | | | |
11810 * ---------------------------------------------------------------|
11811 * YES | [ 30, 240] | 240 | [240, 240] |
11812 * |-------------|-------------|----------------|
11813 * | [240, 240] | 240 | [240, 240] |
11814 * ---------------------------------------------------------------|
11815 * When Video stream is absent in configure_streams,
11816 * preview fps = sensor_fps / batchsize
11817 * Eg: for 240fps at batchSize 4, preview = 60fps
11818 * for 120fps at batchSize 4, preview = 30fps
11819 *
11820 * When video stream is present in configure_streams, preview fps is as per
11821 * the ratio of preview buffers to video buffers requested in process
11822 * capture request
11823 */
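    /* Illustrative example: for a 240 fps HFR request, mBatchSize below becomes
     * 240 / PREVIEW_FPS_FOR_HFR (e.g. 8 if PREVIEW_FPS_FOR_HFR is 30), and is then
     * capped at MAX_HFR_BATCH_SIZE. */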
11824 mBatchSize = 0;
11825 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11826 fps_range.min_fps = fps_range.video_max_fps;
11827 fps_range.video_min_fps = fps_range.video_max_fps;
11828 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11829 fps_range.max_fps);
11830 if (NAME_NOT_FOUND != val) {
11831 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11833 return BAD_VALUE;
11834 }
11835
11836 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11837 /* If batchmode is currently in progress and the fps changes,
11838 * set the flag to restart the sensor */
11839 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11840 (mHFRVideoFps != fps_range.max_fps)) {
11841 mNeedSensorRestart = true;
11842 }
11843 mHFRVideoFps = fps_range.max_fps;
11844 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11845 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11846 mBatchSize = MAX_HFR_BATCH_SIZE;
11847 }
11848 }
11849 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11850
11851 }
11852 } else {
11853 /* HFR mode is session param in backend/ISP. This should be reset when
11854 * in non-HFR mode */
11855 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11857 return BAD_VALUE;
11858 }
11859 }
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11861 return BAD_VALUE;
11862 }
11863 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11864 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11865 return rc;
11866}
11867
11868/*===========================================================================
11869 * FUNCTION : translateToHalMetadata
11870 *
11871 * DESCRIPTION: read the framework camera_metadata_t settings and convert them to parm_type_t entries
11872 *
11873 *
11874 * PARAMETERS :
11875 * @request : request sent from framework
11876 *
11877 *
11878 * RETURN : success: NO_ERROR
11879 *              failure: BAD_VALUE
11880 *==========================================================================*/
11881int QCamera3HardwareInterface::translateToHalMetadata
11882 (const camera3_capture_request_t *request,
11883 metadata_buffer_t *hal_metadata,
11884        uint32_t snapshotStreamId) {
11885 if (request == nullptr || hal_metadata == nullptr) {
11886 return BAD_VALUE;
11887 }
11888
11889 int64_t minFrameDuration = getMinFrameDuration(request);
11890
11891 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11892 minFrameDuration);
11893}
11894
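/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: translate framework camera_metadata_t settings into HAL
 *              metadata_buffer_t entries
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings from the framework request
 *   @hal_metadata      : destination HAL metadata buffer
 *   @snapshotStreamId  : stream id attached to the JPEG rotation info
 *   @minFrameDuration  : minimum frame duration used to clamp
 *                        ANDROID_SENSOR_FRAME_DURATION
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/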
11895int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11896 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11897 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11898
11899    int rc = 0;
11900    CameraMetadata frame_settings;
11901    frame_settings = frameworkMetadata;
11902
11903 /* Do not change the order of the following list unless you know what you are
11904 * doing.
11905 * The order is laid out in such a way that parameters in the front of the table
11906 * may be used to override the parameters later in the table. Examples are:
11907 * 1. META_MODE should precede AEC/AWB/AF MODE
11908     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11909 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11910 * 4. Any mode should precede it's corresponding settings
11911     * 4. Any mode should precede its corresponding settings
11912 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11913 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11915 rc = BAD_VALUE;
11916 }
11917 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11918 if (rc != NO_ERROR) {
11919 LOGE("extractSceneMode failed");
11920 }
11921 }
11922
11923 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11924 uint8_t fwk_aeMode =
11925 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11926 uint8_t aeMode;
11927 int32_t redeye;
11928
11929 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11930 aeMode = CAM_AE_MODE_OFF;
11931        } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11932            aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
11933        } else {
11934 aeMode = CAM_AE_MODE_ON;
11935 }
11936 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11937 redeye = 1;
11938 } else {
11939 redeye = 0;
11940 }
11941
11942 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11943 fwk_aeMode);
11944 if (NAME_NOT_FOUND != val) {
11945 int32_t flashMode = (int32_t)val;
11946 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11947 }
11948
11949 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11951 rc = BAD_VALUE;
11952 }
11953 }
11954
11955 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11956 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11957 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11958 fwk_whiteLevel);
11959 if (NAME_NOT_FOUND != val) {
11960 uint8_t whiteLevel = (uint8_t)val;
11961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965 }
11966
11967 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11968 uint8_t fwk_cacMode =
11969 frame_settings.find(
11970 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11971 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11972 fwk_cacMode);
11973 if (NAME_NOT_FOUND != val) {
11974 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11975 bool entryAvailable = FALSE;
11976 // Check whether Frameworks set CAC mode is supported in device or not
11977 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11978 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11979 entryAvailable = TRUE;
11980 break;
11981 }
11982 }
11983 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11984 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11985 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11986 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11987 if (entryAvailable == FALSE) {
11988 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11989 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11990 } else {
11991 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11992 // High is not supported and so set the FAST as spec say's underlying
11993 // device implementation can be the same for both modes.
11994 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11995 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11996 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11997 // in order to avoid the fps drop due to high quality
11998 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11999 } else {
12000 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12001 }
12002 }
12003 }
12004 LOGD("Final cacMode is %d", cacMode);
12005 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12006 rc = BAD_VALUE;
12007 }
12008 } else {
12009 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12010 }
12011 }
12012
12013    char af_value[PROPERTY_VALUE_MAX];
12014 property_get("persist.camera.af.infinity", af_value, "0");
12015
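    // persist.camera.af.infinity appears to act as a debug override: any non-zero
    // value forces fixed focus at infinity instead of honoring the framework AF mode.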
12016    uint8_t fwk_focusMode = 0;
12017    if (atoi(af_value) == 0) {
12018        if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
12019            fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
12020            int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12021 fwk_focusMode);
12022 if (NAME_NOT_FOUND != val) {
12023 uint8_t focusMode = (uint8_t)val;
12024 LOGD("set focus mode %d", focusMode);
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12026 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12027 rc = BAD_VALUE;
12028 }
12029            }
12030        }
12031    } else {
12032 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12033 LOGE("Focus forced to infinity %d", focusMode);
12034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12035 rc = BAD_VALUE;
12036 }
12037    }
12038
12039    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12040            fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
12041        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12043 focalDistance)) {
12044 rc = BAD_VALUE;
12045 }
12046 }
12047
12048 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12049 uint8_t fwk_antibandingMode =
12050 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12051 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12052 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12053 if (NAME_NOT_FOUND != val) {
12054 uint32_t hal_antibandingMode = (uint32_t)val;
12055            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12056 if (m60HzZone) {
12057 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12058 } else {
12059 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12060 }
12061 }
12062            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12063 hal_antibandingMode)) {
12064 rc = BAD_VALUE;
12065 }
12066 }
12067 }
12068
12069 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12070 int32_t expCompensation = frame_settings.find(
12071 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12072 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12073 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12074 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12075 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
12076        LOGD("Setting compensation:%d", expCompensation);
12077        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12078 expCompensation)) {
12079 rc = BAD_VALUE;
12080 }
12081 }
12082
12083 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12084 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12086 rc = BAD_VALUE;
12087 }
12088 }
12089 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12090 rc = setHalFpsRange(frame_settings, hal_metadata);
12091 if (rc != NO_ERROR) {
12092 LOGE("setHalFpsRange failed");
12093 }
12094 }
12095
12096 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12097 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12099 rc = BAD_VALUE;
12100 }
12101 }
12102
12103 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12104 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12105 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12106 fwk_effectMode);
12107 if (NAME_NOT_FOUND != val) {
12108 uint8_t effectMode = (uint8_t)val;
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12110 rc = BAD_VALUE;
12111 }
12112 }
12113 }
12114
12115 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12116 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12118 colorCorrectMode)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122
12123 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12124 cam_color_correct_gains_t colorCorrectGains;
12125 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12126 colorCorrectGains.gains[i] =
12127 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12128 }
12129 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12130 colorCorrectGains)) {
12131 rc = BAD_VALUE;
12132 }
12133 }
12134
12135 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12136 cam_color_correct_matrix_t colorCorrectTransform;
12137 cam_rational_type_t transform_elem;
12138 size_t num = 0;
12139 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12140 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12141 transform_elem.numerator =
12142 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12143 transform_elem.denominator =
12144 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12145 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12146 num++;
12147 }
12148 }
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12150 colorCorrectTransform)) {
12151 rc = BAD_VALUE;
12152 }
12153 }
12154
12155 cam_trigger_t aecTrigger;
12156 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12157 aecTrigger.trigger_id = -1;
12158 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12159 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12160 aecTrigger.trigger =
12161 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12162 aecTrigger.trigger_id =
12163 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12165 aecTrigger)) {
12166 rc = BAD_VALUE;
12167 }
12168 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12169 aecTrigger.trigger, aecTrigger.trigger_id);
12170 }
12171
12172 /*af_trigger must come with a trigger id*/
12173 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12174 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12175 cam_trigger_t af_trigger;
12176 af_trigger.trigger =
12177 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12178 af_trigger.trigger_id =
12179 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12181 rc = BAD_VALUE;
12182 }
12183 LOGD("AfTrigger: %d AfTriggerID: %d",
12184 af_trigger.trigger, af_trigger.trigger_id);
12185 }
12186
12187 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12188 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12190 rc = BAD_VALUE;
12191 }
12192 }
12193 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12194 cam_edge_application_t edge_application;
12195 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
12196
12197        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12198            edge_application.sharpness = 0;
12199        } else {
12200            edge_application.sharpness =
12201 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12202 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12203 int32_t sharpness =
12204 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12205 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12206 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12207 LOGD("Setting edge mode sharpness %d", sharpness);
12208 edge_application.sharpness = sharpness;
12209 }
12210 }
12211        }
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12213 rc = BAD_VALUE;
12214 }
12215 }
12216
12217 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12218 int32_t respectFlashMode = 1;
12219 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12220 uint8_t fwk_aeMode =
12221 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12222            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12223                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12224                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12225                respectFlashMode = 0;
12226 LOGH("AE Mode controls flash, ignore android.flash.mode");
12227 }
12228 }
12229 if (respectFlashMode) {
12230 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12231 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12232 LOGH("flash mode after mapping %d", val);
12233 // To check: CAM_INTF_META_FLASH_MODE usage
12234 if (NAME_NOT_FOUND != val) {
12235 uint8_t flashMode = (uint8_t)val;
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12237 rc = BAD_VALUE;
12238 }
12239 }
12240 }
12241 }
12242
12243 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12244 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12246 rc = BAD_VALUE;
12247 }
12248 }
12249
12250 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12251 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12253 flashFiringTime)) {
12254 rc = BAD_VALUE;
12255 }
12256 }
12257
12258 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12259 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12261 hotPixelMode)) {
12262 rc = BAD_VALUE;
12263 }
12264 }
12265
12266 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12267 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12269 lensAperture)) {
12270 rc = BAD_VALUE;
12271 }
12272 }
12273
12274 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12275 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12277 filterDensity)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281
12282 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12283 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12285 focalLength)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289
12290 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12291 uint8_t optStabMode =
12292 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12294 optStabMode)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12300 uint8_t videoStabMode =
12301 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12302 LOGD("videoStabMode from APP = %d", videoStabMode);
12303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12304 videoStabMode)) {
12305 rc = BAD_VALUE;
12306 }
12307 }
12308
12309
12310 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12311 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12313 noiseRedMode)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317
12318 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12319 float reprocessEffectiveExposureFactor =
12320 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12322 reprocessEffectiveExposureFactor)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 cam_crop_region_t scalerCropRegion;
12328 bool scalerCropSet = false;
12329 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12330 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12331 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12332 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12333 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12334
12335 // Map coordinate system from active array to sensor output.
12336 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12337 scalerCropRegion.width, scalerCropRegion.height);
12338
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12340 scalerCropRegion)) {
12341 rc = BAD_VALUE;
12342 }
12343 scalerCropSet = true;
12344 }
12345
12346 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12347 int64_t sensorExpTime =
12348 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12349 LOGD("setting sensorExpTime %lld", sensorExpTime);
12350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12351 sensorExpTime)) {
12352 rc = BAD_VALUE;
12353 }
12354 }
12355
12356 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12357 int64_t sensorFrameDuration =
12358 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
12359        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12360 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12361 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12362 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12364 sensorFrameDuration)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12370 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12371 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12372 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12373 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12374 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12375 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12377 sensorSensitivity)) {
12378 rc = BAD_VALUE;
12379 }
12380 }
12381
12382#ifndef USE_HAL_3_3
12383 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12384 int32_t ispSensitivity =
12385 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12386 if (ispSensitivity <
12387 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12388 ispSensitivity =
12389 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12390 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12391 }
12392 if (ispSensitivity >
12393 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12394 ispSensitivity =
12395 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12396 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12397 }
12398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12399 ispSensitivity)) {
12400 rc = BAD_VALUE;
12401 }
12402 }
12403#endif
12404
12405    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12406 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12408 rc = BAD_VALUE;
12409 }
12410 }
12411
12412 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12413 uint8_t fwk_facedetectMode =
12414 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12415
12416 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12417 fwk_facedetectMode);
12418
12419 if (NAME_NOT_FOUND != val) {
12420 uint8_t facedetectMode = (uint8_t)val;
12421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12422 facedetectMode)) {
12423 rc = BAD_VALUE;
12424 }
12425 }
12426 }
12427
12428    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
12429        uint8_t histogramMode =
12430                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
12431        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12432 histogramMode)) {
12433 rc = BAD_VALUE;
12434 }
12435 }
12436
12437 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12438 uint8_t sharpnessMapMode =
12439 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12441 sharpnessMapMode)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445
12446 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12447 uint8_t tonemapMode =
12448 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12454 /*All tonemap channels will have the same number of points*/
12455 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12456 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12457 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12458 cam_rgb_tonemap_curves tonemapCurves;
12459 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12460 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12461 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12462 tonemapCurves.tonemap_points_cnt,
12463 CAM_MAX_TONEMAP_CURVE_SIZE);
12464 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12465 }
12466
12467 /* ch0 = G*/
12468 size_t point = 0;
12469 cam_tonemap_curve_t tonemapCurveGreen;
12470 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12471 for (size_t j = 0; j < 2; j++) {
12472 tonemapCurveGreen.tonemap_points[i][j] =
12473 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12474 point++;
12475 }
12476 }
12477 tonemapCurves.curves[0] = tonemapCurveGreen;
12478
12479 /* ch 1 = B */
12480 point = 0;
12481 cam_tonemap_curve_t tonemapCurveBlue;
12482 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12483 for (size_t j = 0; j < 2; j++) {
12484 tonemapCurveBlue.tonemap_points[i][j] =
12485 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12486 point++;
12487 }
12488 }
12489 tonemapCurves.curves[1] = tonemapCurveBlue;
12490
12491 /* ch 2 = R */
12492 point = 0;
12493 cam_tonemap_curve_t tonemapCurveRed;
12494 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12495 for (size_t j = 0; j < 2; j++) {
12496 tonemapCurveRed.tonemap_points[i][j] =
12497 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12498 point++;
12499 }
12500 }
12501 tonemapCurves.curves[2] = tonemapCurveRed;
12502
12503 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12504 tonemapCurves)) {
12505 rc = BAD_VALUE;
12506 }
12507 }
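    // Each framework tonemap curve above is a flat array of (Pin, Pout) pairs,
    // which is why the per-channel point count is derived as count / 2.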
12508
12509 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12510 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12512 captureIntent)) {
12513 rc = BAD_VALUE;
12514 }
12515 }
12516
12517 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12518 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12520 blackLevelLock)) {
12521 rc = BAD_VALUE;
12522 }
12523 }
12524
12525 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12526 uint8_t lensShadingMapMode =
12527 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12529 lensShadingMapMode)) {
12530 rc = BAD_VALUE;
12531 }
12532 }
12533
12534 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12535 cam_area_t roi;
12536 bool reset = true;
12537        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
12538
12539 // Map coordinate system from active array to sensor output.
12540 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12541 roi.rect.height);
12542
12543 if (scalerCropSet) {
12544 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12545 }
12546 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12547 rc = BAD_VALUE;
12548 }
12549 }
12550
12551 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12552 cam_area_t roi;
12553 bool reset = true;
12554        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
12555
12556 // Map coordinate system from active array to sensor output.
12557 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12558 roi.rect.height);
12559
12560 if (scalerCropSet) {
12561 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12562 }
12563 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12564 rc = BAD_VALUE;
12565 }
12566 }
12567
12568 // CDS for non-HFR non-video mode
12569 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12570 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12571 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12572 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12573 LOGE("Invalid CDS mode %d!", *fwk_cds);
12574 } else {
12575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12576 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12577 rc = BAD_VALUE;
12578 }
12579 }
12580 }
12581
12582    // Video HDR
12583    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
12584    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
12585        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12586    }
12587    if (m_bVideoHdrEnabled)
12588        vhdr = CAM_VIDEO_HDR_MODE_ON;
12589
12590    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12591
12592    if (vhdr != curr_hdr_state)
12593        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12594
12595    rc = setVideoHdrMode(mParameters, vhdr);
12596    if (rc != NO_ERROR) {
12597        LOGE("setVideoHdrMode failed");
12598    }
12599
12600 //IR
12601 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12602 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12603 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
12604        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12605        uint8_t isIRon = 0;
12606
12607        isIRon = (fwk_ir > 0) ? 1 : 0;
12608        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12609            LOGE("Invalid IR mode %d!", fwk_ir);
12610        } else {
12611            if (isIRon != curr_ir_state)
12612                LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12613
12614            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12615 CAM_INTF_META_IR_MODE, fwk_ir)) {
12616 rc = BAD_VALUE;
12617 }
12618 }
12619 }
12620
12621    // Binning Correction Mode
12622 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12623 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12624 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12625 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12626 || (0 > fwk_binning_correction)) {
12627 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12628 } else {
12629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12630 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634 }
12635
12636    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12637 float aec_speed;
12638 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12639 LOGD("AEC Speed :%f", aec_speed);
12640 if ( aec_speed < 0 ) {
12641            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12642 } else {
12643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12644 aec_speed)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648 }
12649
12650 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12651 float awb_speed;
12652 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12653 LOGD("AWB Speed :%f", awb_speed);
12654 if ( awb_speed < 0 ) {
12655            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12656 } else {
12657 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12658 awb_speed)) {
12659 rc = BAD_VALUE;
12660 }
12661 }
12662 }
12663
12664    // TNR
12665 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12666 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12667 uint8_t b_TnrRequested = 0;
12668        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
12669        cam_denoise_param_t tnr;
12670 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12671 tnr.process_plates =
12672 (cam_denoise_process_type_t)frame_settings.find(
12673 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12674 b_TnrRequested = tnr.denoise_enable;
12675
12676        if (b_TnrRequested != curr_tnr_state)
12677            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12678
12679        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12680 rc = BAD_VALUE;
12681 }
12682 }
12683
12684    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
12685        int32_t* exposure_metering_mode =
12686                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
12687        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12688 *exposure_metering_mode)) {
12689 rc = BAD_VALUE;
12690 }
12691 }
12692
12693    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12694 int32_t fwk_testPatternMode =
12695 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12696 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12697 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12698
12699 if (NAME_NOT_FOUND != testPatternMode) {
12700 cam_test_pattern_data_t testPatternData;
12701 memset(&testPatternData, 0, sizeof(testPatternData));
12702 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12703 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12704 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12705 int32_t *fwk_testPatternData =
12706 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
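                // ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Geven, Godd, B];
                // map the two green values to Gr/Gb based on the sensor's Bayer
                // (color filter) arrangement below.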
12707 testPatternData.r = fwk_testPatternData[0];
12708 testPatternData.b = fwk_testPatternData[3];
12709 switch (gCamCapability[mCameraId]->color_arrangement) {
12710 case CAM_FILTER_ARRANGEMENT_RGGB:
12711 case CAM_FILTER_ARRANGEMENT_GRBG:
12712 testPatternData.gr = fwk_testPatternData[1];
12713 testPatternData.gb = fwk_testPatternData[2];
12714 break;
12715 case CAM_FILTER_ARRANGEMENT_GBRG:
12716 case CAM_FILTER_ARRANGEMENT_BGGR:
12717 testPatternData.gr = fwk_testPatternData[2];
12718 testPatternData.gb = fwk_testPatternData[1];
12719 break;
12720 default:
12721 LOGE("color arrangement %d is not supported",
12722 gCamCapability[mCameraId]->color_arrangement);
12723 break;
12724 }
12725 }
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12727 testPatternData)) {
12728 rc = BAD_VALUE;
12729 }
12730 } else {
12731 LOGE("Invalid framework sensor test pattern mode %d",
12732 fwk_testPatternMode);
12733 }
12734 }
12735
12736 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12737 size_t count = 0;
12738 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12739 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12740 gps_coords.data.d, gps_coords.count, count);
12741 if (gps_coords.count != count) {
12742 rc = BAD_VALUE;
12743 }
12744 }
12745
12746 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12747 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12748 size_t count = 0;
12749 const char *gps_methods_src = (const char *)
12750 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12751 memset(gps_methods, '\0', sizeof(gps_methods));
12752 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12753 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12754 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12755 if (GPS_PROCESSING_METHOD_SIZE != count) {
12756 rc = BAD_VALUE;
12757 }
12758 }
12759
12760 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12761 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12763 gps_timestamp)) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767
12768 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12769 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12770 cam_rotation_info_t rotation_info;
12771 if (orientation == 0) {
12772 rotation_info.rotation = ROTATE_0;
12773 } else if (orientation == 90) {
12774 rotation_info.rotation = ROTATE_90;
12775 } else if (orientation == 180) {
12776 rotation_info.rotation = ROTATE_180;
12777 } else if (orientation == 270) {
12778 rotation_info.rotation = ROTATE_270;
12779 }
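        // ANDROID_JPEG_ORIENTATION is defined by the framework as a multiple of 90,
        // so the if/else chain above covers all valid values.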
12780        rotation_info.device_rotation = ROTATE_0;
12781        rotation_info.streamId = snapshotStreamId;
12782 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12784 rc = BAD_VALUE;
12785 }
12786 }
12787
12788 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12789 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12791 rc = BAD_VALUE;
12792 }
12793 }
12794
12795 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12796 uint32_t thumb_quality = (uint32_t)
12797 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12799 thumb_quality)) {
12800 rc = BAD_VALUE;
12801 }
12802 }
12803
12804 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12805 cam_dimension_t dim;
12806 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12807 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12809 rc = BAD_VALUE;
12810 }
12811 }
12812
12813 // Internal metadata
12814 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12815 size_t count = 0;
12816 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12817 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12818 privatedata.data.i32, privatedata.count, count);
12819 if (privatedata.count != count) {
12820 rc = BAD_VALUE;
12821 }
12822 }
12823
12824    // ISO/Exposure Priority
12825 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12826 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12827 cam_priority_mode_t mode =
12828 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12829 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12830 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12831 use_iso_exp_pty.previewOnly = FALSE;
12832 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12833 use_iso_exp_pty.value = *ptr;
12834
12835 if(CAM_ISO_PRIORITY == mode) {
12836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12837 use_iso_exp_pty)) {
12838 rc = BAD_VALUE;
12839 }
12840 }
12841 else {
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12843 use_iso_exp_pty)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847
12848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852 } else {
12853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12854 rc = BAD_VALUE;
12855        }
12856 }
12857
12858 // Saturation
12859 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12860 int32_t* use_saturation =
12861 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12863 rc = BAD_VALUE;
12864 }
12865 }
12866
12867    // EV step
12868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12869 gCamCapability[mCameraId]->exp_compensation_step)) {
12870 rc = BAD_VALUE;
12871 }
12872
12873 // CDS info
12874 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12875 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12876 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12877
12878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12879 CAM_INTF_META_CDS_DATA, *cdsData)) {
12880 rc = BAD_VALUE;
12881 }
12882 }
12883
12884    // Hybrid AE
12885 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12886 uint8_t *hybrid_ae = (uint8_t *)
12887 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12888
12889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12890 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12891 rc = BAD_VALUE;
12892 }
12893 }
12894
Shuzhen Wang14415f52016-11-16 18:26:18 -080012895 // Histogram
12896 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12897 uint8_t histogramMode =
12898 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12899 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12900 histogramMode)) {
12901 rc = BAD_VALUE;
12902 }
12903 }
12904
12905 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12906 int32_t histogramBins =
12907 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12908 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12909 histogramBins)) {
12910 rc = BAD_VALUE;
12911 }
12912 }
12913
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012914 // Tracking AF
12915 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12916 uint8_t trackingAfTrigger =
12917 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12919 trackingAfTrigger)) {
12920 rc = BAD_VALUE;
12921 }
12922 }
12923
Thierry Strudel3d639192016-09-09 11:52:26 -070012924 return rc;
12925}
12926
12927/*===========================================================================
12928 * FUNCTION : captureResultCb
12929 *
12930 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12931 *
12932 * PARAMETERS :
 12933 * @metadata: metadata superbuffer from mm-camera-interface (NULL for buffer-only results)
 12934 * @buffer : actual gralloc buffer to be returned to framework; NULL if metadata
 12935 * @frame_number, @isInputBuffer, @userdata: frame number, input-buffer flag, and HWI instance
12936 *
12937 * RETURN : NONE
12938 *==========================================================================*/
12939void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12940 camera3_stream_buffer_t *buffer,
12941 uint32_t frame_number, bool isInputBuffer, void *userdata)
12942{
12943 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12944 if (hw == NULL) {
12945 LOGE("Invalid hw %p", hw);
12946 return;
12947 }
12948
12949 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12950 return;
12951}
12952
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012953/*===========================================================================
12954 * FUNCTION : setBufferErrorStatus
12955 *
12956 * DESCRIPTION: Callback handler for channels to report any buffer errors
12957 *
12958 * PARAMETERS :
12959 * @ch : Channel on which buffer error is reported from
12960 * @frame_number : frame number on which buffer error is reported on
12961 * @buffer_status : buffer error status
12962 * @userdata: userdata
12963 *
12964 * RETURN : NONE
12965 *==========================================================================*/
12966void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12967 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12968{
12969 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12970 if (hw == NULL) {
12971 LOGE("Invalid hw %p", hw);
12972 return;
12973 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012974
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012975 hw->setBufferErrorStatus(ch, frame_number, err);
12976 return;
12977}
12978
12979void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12980 uint32_t frameNumber, camera3_buffer_status_t err)
12981{
12982 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12983 pthread_mutex_lock(&mMutex);
12984
12985 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12986 if (req.frame_number != frameNumber)
12987 continue;
12988 for (auto& k : req.mPendingBufferList) {
12989 if(k.stream->priv == ch) {
12990 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12991 }
12992 }
12993 }
12994
12995 pthread_mutex_unlock(&mMutex);
12996 return;
12997}
Thierry Strudel3d639192016-09-09 11:52:26 -070012998/*===========================================================================
12999 * FUNCTION : initialize
13000 *
13001 * DESCRIPTION: Pass framework callback pointers to HAL
13002 *
 13003 * PARAMETERS :
 13004 *   @device       : camera3 device handle
 13005 *   @callback_ops : framework callback function pointers
13006 * RETURN : Success : 0
13007 * Failure: -ENODEV
13008 *==========================================================================*/
13009
13010int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13011 const camera3_callback_ops_t *callback_ops)
13012{
13013 LOGD("E");
13014 QCamera3HardwareInterface *hw =
13015 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13016 if (!hw) {
13017 LOGE("NULL camera device");
13018 return -ENODEV;
13019 }
13020
13021 int rc = hw->initialize(callback_ops);
13022 LOGD("X");
13023 return rc;
13024}
13025
13026/*===========================================================================
13027 * FUNCTION : configure_streams
13028 *
 13029 * DESCRIPTION: Reset the HAL pipeline and configure a new set of output streams
 13030 *
 13031 * PARAMETERS :
 13032 *   @device      : camera3 device handle
 13033 *   @stream_list : set of streams to be configured
13034 * RETURN : Success: 0
13035 * Failure: -EINVAL (if stream configuration is invalid)
13036 * -ENODEV (fatal error)
13037 *==========================================================================*/
13038
13039int QCamera3HardwareInterface::configure_streams(
13040 const struct camera3_device *device,
13041 camera3_stream_configuration_t *stream_list)
13042{
13043 LOGD("E");
13044 QCamera3HardwareInterface *hw =
13045 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13046 if (!hw) {
13047 LOGE("NULL camera device");
13048 return -ENODEV;
13049 }
13050 int rc = hw->configureStreams(stream_list);
13051 LOGD("X");
13052 return rc;
13053}
13054
13055/*===========================================================================
13056 * FUNCTION : construct_default_request_settings
13057 *
13058 * DESCRIPTION: Configure a settings buffer to meet the required use case
13059 *
13060 * PARAMETERS :
13061 *
13062 *
13063 * RETURN : Success: Return valid metadata
13064 * Failure: Return NULL
13065 *==========================================================================*/
13066const camera_metadata_t* QCamera3HardwareInterface::
13067 construct_default_request_settings(const struct camera3_device *device,
13068 int type)
13069{
13070
13071 LOGD("E");
13072 camera_metadata_t* fwk_metadata = NULL;
13073 QCamera3HardwareInterface *hw =
13074 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13075 if (!hw) {
13076 LOGE("NULL camera device");
13077 return NULL;
13078 }
13079
13080 fwk_metadata = hw->translateCapabilityToMetadata(type);
13081
13082 LOGD("X");
13083 return fwk_metadata;
13084}
13085
13086/*===========================================================================
13087 * FUNCTION : process_capture_request
13088 *
 13089 * DESCRIPTION: Queue one capture request from the framework for processing
 13090 *
 13091 * PARAMETERS :
 13092 *   @device  : camera3 device handle
 13093 *   @request : capture request with settings and output buffers
 13094 * RETURN : 0 on success, -EINVAL on invalid device or request
13095 *==========================================================================*/
13096int QCamera3HardwareInterface::process_capture_request(
13097 const struct camera3_device *device,
13098 camera3_capture_request_t *request)
13099{
13100 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013101 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013102 QCamera3HardwareInterface *hw =
13103 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13104 if (!hw) {
13105 LOGE("NULL camera device");
13106 return -EINVAL;
13107 }
13108
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013109 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013110 LOGD("X");
13111 return rc;
13112}
13113
13114/*===========================================================================
13115 * FUNCTION : dump
13116 *
 13117 * DESCRIPTION: Dump HAL state to the given file descriptor (dumpsys media.camera)
 13118 *
 13119 * PARAMETERS :
 13120 *   @device : camera3 device handle
 13121 *   @fd     : file descriptor to write the dump into
 13122 * RETURN : None
13123 *==========================================================================*/
13124
13125void QCamera3HardwareInterface::dump(
13126 const struct camera3_device *device, int fd)
13127{
13128 /* Log level property is read when "adb shell dumpsys media.camera" is
13129 called so that the log level can be controlled without restarting
13130 the media server */
13131 getLogLevel();
13132
13133 LOGD("E");
13134 QCamera3HardwareInterface *hw =
13135 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13136 if (!hw) {
13137 LOGE("NULL camera device");
13138 return;
13139 }
13140
13141 hw->dump(fd);
13142 LOGD("X");
13143 return;
13144}
13145
13146/*===========================================================================
13147 * FUNCTION : flush
13148 *
 13149 * DESCRIPTION: Flush all in-flight captures; only acted upon in STARTED state
 13150 *
 13151 * PARAMETERS :
 13152 *   @device : camera3 device handle
 13153 *
 13154 * RETURN : 0 on success or no-op, -EINVAL on NULL device, -ENODEV on device error
13155 *==========================================================================*/
13156
13157int QCamera3HardwareInterface::flush(
13158 const struct camera3_device *device)
13159{
13160 int rc;
13161 LOGD("E");
13162 QCamera3HardwareInterface *hw =
13163 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13164 if (!hw) {
13165 LOGE("NULL camera device");
13166 return -EINVAL;
13167 }
13168
13169 pthread_mutex_lock(&hw->mMutex);
13170 // Validate current state
13171 switch (hw->mState) {
13172 case STARTED:
13173 /* valid state */
13174 break;
13175
13176 case ERROR:
13177 pthread_mutex_unlock(&hw->mMutex);
13178 hw->handleCameraDeviceError();
13179 return -ENODEV;
13180
13181 default:
13182 LOGI("Flush returned during state %d", hw->mState);
13183 pthread_mutex_unlock(&hw->mMutex);
13184 return 0;
13185 }
13186 pthread_mutex_unlock(&hw->mMutex);
13187
13188 rc = hw->flush(true /* restart channels */ );
13189 LOGD("X");
13190 return rc;
13191}
13192
13193/*===========================================================================
13194 * FUNCTION : close_camera_device
13195 *
 13196 * DESCRIPTION: Close the camera device and free the HAL instance
 13197 *
 13198 * PARAMETERS :
 13199 *   @device : hw_device_t handle of the camera device being closed
 13200 *
 13201 * RETURN : NO_ERROR on success, BAD_VALUE on NULL device
13202 *==========================================================================*/
13203int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13204{
13205 int ret = NO_ERROR;
13206 QCamera3HardwareInterface *hw =
13207 reinterpret_cast<QCamera3HardwareInterface *>(
13208 reinterpret_cast<camera3_device_t *>(device)->priv);
13209 if (!hw) {
13210 LOGE("NULL camera device");
13211 return BAD_VALUE;
13212 }
13213
13214 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13215 delete hw;
13216 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013217 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013218 return ret;
13219}
13220
13221/*===========================================================================
13222 * FUNCTION : getWaveletDenoiseProcessPlate
13223 *
13224 * DESCRIPTION: query wavelet denoise process plate
13225 *
13226 * PARAMETERS : None
13227 *
 13228 * RETURN : WNR process plate value
13229 *==========================================================================*/
13230cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13231{
13232 char prop[PROPERTY_VALUE_MAX];
13233 memset(prop, 0, sizeof(prop));
13234 property_get("persist.denoise.process.plates", prop, "0");
13235 int processPlate = atoi(prop);
13236 switch(processPlate) {
13237 case 0:
13238 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13239 case 1:
13240 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13241 case 2:
13242 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13243 case 3:
13244 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13245 default:
13246 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13247 }
13248}
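// Illustrative usage (editorial; assumes standard Android property tooling): the plate choice above
// is driven entirely by the persist.denoise.process.plates property, e.g. on a debug build:
//
//     adb shell setprop persist.denoise.process.plates 1    # CAM_WAVELET_DENOISE_CBCR_ONLY
//
// An unset property defaults to 0 (YCbCr plane), while out-of-range values fall back to
// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR via the default case. getTemporalDenoiseProcessPlate()
// below applies the same mapping to persist.tnr.process.plates.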
13249
13250
13251/*===========================================================================
13252 * FUNCTION : getTemporalDenoiseProcessPlate
13253 *
13254 * DESCRIPTION: query temporal denoise process plate
13255 *
13256 * PARAMETERS : None
13257 *
 13258 * RETURN : TNR process plate value
13259 *==========================================================================*/
13260cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13261{
13262 char prop[PROPERTY_VALUE_MAX];
13263 memset(prop, 0, sizeof(prop));
13264 property_get("persist.tnr.process.plates", prop, "0");
13265 int processPlate = atoi(prop);
13266 switch(processPlate) {
13267 case 0:
13268 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13269 case 1:
13270 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13271 case 2:
13272 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13273 case 3:
13274 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13275 default:
13276 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13277 }
13278}
13279
13280
13281/*===========================================================================
13282 * FUNCTION : extractSceneMode
13283 *
13284 * DESCRIPTION: Extract scene mode from frameworks set metadata
13285 *
13286 * PARAMETERS :
13287 * @frame_settings: CameraMetadata reference
 13288 * @metaMode: ANDROID_CONTROL_MODE setting from the framework
 13289 * @hal_metadata: hal metadata structure
 13290 *
 13291 * RETURN : NO_ERROR on success, error code on failure
13292 *==========================================================================*/
13293int32_t QCamera3HardwareInterface::extractSceneMode(
13294 const CameraMetadata &frame_settings, uint8_t metaMode,
13295 metadata_buffer_t *hal_metadata)
13296{
13297 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013298 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13299
13300 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13301 LOGD("Ignoring control mode OFF_KEEP_STATE");
13302 return NO_ERROR;
13303 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013304
13305 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13306 camera_metadata_ro_entry entry =
13307 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13308 if (0 == entry.count)
13309 return rc;
13310
13311 uint8_t fwk_sceneMode = entry.data.u8[0];
13312
13313 int val = lookupHalName(SCENE_MODES_MAP,
13314 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13315 fwk_sceneMode);
13316 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013317 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013318 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013319 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013320 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013321
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013322 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13323 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13324 }
13325
13326 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
 13327        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013328 cam_hdr_param_t hdr_params;
13329 hdr_params.hdr_enable = 1;
13330 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13331 hdr_params.hdr_need_1x = false;
13332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13333 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13334 rc = BAD_VALUE;
13335 }
13336 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013337
Thierry Strudel3d639192016-09-09 11:52:26 -070013338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13339 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13340 rc = BAD_VALUE;
13341 }
13342 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013343
13344 if (mForceHdrSnapshot) {
13345 cam_hdr_param_t hdr_params;
13346 hdr_params.hdr_enable = 1;
13347 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13348 hdr_params.hdr_need_1x = false;
13349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13350 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13351 rc = BAD_VALUE;
13352 }
13353 }
13354
Thierry Strudel3d639192016-09-09 11:52:26 -070013355 return rc;
13356}
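// Editorial sketch (not from the original source): a framework request intended to reach the
// bestshot/HDR path above would carry, at minimum, the following settings (CameraMetadata is the
// helper class already used in this file):
//
//     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
//     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
//     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
//     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
//
// lookupHalName() then maps the framework scene mode to its CAM_SCENE_MODE_* counterpart; sensor
// HDR is attempted first when supported, and multi-frame HDR bracketing is batched otherwise via
// CAM_INTF_PARM_HAL_BRACKETING_HDR.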
13357
13358/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013359 * FUNCTION : setVideoHdrMode
13360 *
13361 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13362 *
13363 * PARAMETERS :
13364 * @hal_metadata: hal metadata structure
 13365 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE vendor setting)
 13366 *
 13367 * RETURN : NO_ERROR on success, BAD_VALUE on an invalid mode
13368 *==========================================================================*/
13369int32_t QCamera3HardwareInterface::setVideoHdrMode(
13370 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13371{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013372 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13373 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13374 }
13375
13376 LOGE("Invalid Video HDR mode %d!", vhdr);
13377 return BAD_VALUE;
13378}
13379
13380/*===========================================================================
13381 * FUNCTION : setSensorHDR
13382 *
13383 * DESCRIPTION: Enable/disable sensor HDR.
13384 *
13385 * PARAMETERS :
13386 * @hal_metadata: hal metadata structure
 13387 * @enable: whether to enable or disable sensor HDR
 13388 * @isVideoHdrEnable: true when invoked for video HDR; skips updating m_bSensorHDREnabled
 13389 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13390 *==========================================================================*/
13391int32_t QCamera3HardwareInterface::setSensorHDR(
13392 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13393{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013394 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013395 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13396
13397 if (enable) {
13398 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13399 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13400 #ifdef _LE_CAMERA_
13401 //Default to staggered HDR for IOT
13402 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13403 #else
13404 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13405 #endif
13406 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13407 }
13408
13409 bool isSupported = false;
13410 switch (sensor_hdr) {
13411 case CAM_SENSOR_HDR_IN_SENSOR:
13412 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13413 CAM_QCOM_FEATURE_SENSOR_HDR) {
13414 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013415 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013416 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013417 break;
13418 case CAM_SENSOR_HDR_ZIGZAG:
13419 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13420 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13421 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013422 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013423 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013424 break;
13425 case CAM_SENSOR_HDR_STAGGERED:
13426 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13427 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13428 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013429 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013430 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013431 break;
13432 case CAM_SENSOR_HDR_OFF:
13433 isSupported = true;
13434 LOGD("Turning off sensor HDR");
13435 break;
13436 default:
13437 LOGE("HDR mode %d not supported", sensor_hdr);
13438 rc = BAD_VALUE;
13439 break;
13440 }
13441
13442 if(isSupported) {
13443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13444 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13445 rc = BAD_VALUE;
13446 } else {
13447 if(!isVideoHdrEnable)
13448 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013449 }
13450 }
13451 return rc;
13452}
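// Illustrative note (editorial; assumes the cam_sensor_hdr_type_t ordering OFF=0, IN_SENSOR=1,
// ZIGZAG=2, STAGGERED=3): the persist.camera.sensor.hdr property selects which sensor HDR flavor
// is attempted whenever HDR is being enabled, e.g.
//
//     adb shell setprop persist.camera.sensor.hdr 3    # request staggered HDR
//
// The selection only takes effect if the matching CAM_QCOM_FEATURE_* bit is present in the
// camera's qcom_supported_feature_mask; unknown values return BAD_VALUE and nothing is batched.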
13453
13454/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013455 * FUNCTION : needRotationReprocess
13456 *
13457 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13458 *
13459 * PARAMETERS : none
13460 *
13461 * RETURN : true: needed
13462 * false: no need
13463 *==========================================================================*/
13464bool QCamera3HardwareInterface::needRotationReprocess()
13465{
13466 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
 13467        // pp has the capability to process rotation, so rotation is handled in reprocess
13468 LOGH("need do reprocess for rotation");
13469 return true;
13470 }
13471
13472 return false;
13473}
13474
13475/*===========================================================================
13476 * FUNCTION : needReprocess
13477 *
 13478 * DESCRIPTION: whether reprocess is needed
13479 *
13480 * PARAMETERS : none
13481 *
13482 * RETURN : true: needed
13483 * false: no need
13484 *==========================================================================*/
13485bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13486{
13487 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13488 // TODO: add for ZSL HDR later
13489 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13490 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13491 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13492 return true;
13493 } else {
13494 LOGH("already post processed frame");
13495 return false;
13496 }
13497 }
13498 return needRotationReprocess();
13499}
13500
13501/*===========================================================================
13502 * FUNCTION : needJpegExifRotation
13503 *
13504 * DESCRIPTION: if rotation from jpeg is needed
13505 *
13506 * PARAMETERS : none
13507 *
13508 * RETURN : true: needed
13509 * false: no need
13510 *==========================================================================*/
13511bool QCamera3HardwareInterface::needJpegExifRotation()
13512{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013513 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013514 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13515 LOGD("Need use Jpeg EXIF Rotation");
13516 return true;
13517 }
13518 return false;
13519}
13520
13521/*===========================================================================
13522 * FUNCTION : addOfflineReprocChannel
13523 *
13524 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13525 * coming from input channel
13526 *
13527 * PARAMETERS :
13528 * @config : reprocess configuration
13529 * @inputChHandle : pointer to the input (source) channel
13530 *
13531 *
13532 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13533 *==========================================================================*/
13534QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13535 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13536{
13537 int32_t rc = NO_ERROR;
13538 QCamera3ReprocessChannel *pChannel = NULL;
13539
13540 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013541 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13542 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013543 if (NULL == pChannel) {
13544 LOGE("no mem for reprocess channel");
13545 return NULL;
13546 }
13547
13548 rc = pChannel->initialize(IS_TYPE_NONE);
13549 if (rc != NO_ERROR) {
13550 LOGE("init reprocess channel failed, ret = %d", rc);
13551 delete pChannel;
13552 return NULL;
13553 }
13554
13555 // pp feature config
13556 cam_pp_feature_config_t pp_config;
13557 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13558
13559 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13560 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13561 & CAM_QCOM_FEATURE_DSDN) {
 13562        // Use CPP CDS in case h/w supports it.
13563 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13564 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13565 }
13566 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13567 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13568 }
13569
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013570 if (config.hdr_param.hdr_enable) {
13571 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13572 pp_config.hdr_param = config.hdr_param;
13573 }
13574
13575 if (mForceHdrSnapshot) {
13576 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13577 pp_config.hdr_param.hdr_enable = 1;
13578 pp_config.hdr_param.hdr_need_1x = 0;
13579 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13580 }
13581
Thierry Strudel3d639192016-09-09 11:52:26 -070013582 rc = pChannel->addReprocStreamsFromSource(pp_config,
13583 config,
13584 IS_TYPE_NONE,
13585 mMetadataChannel);
13586
13587 if (rc != NO_ERROR) {
13588 delete pChannel;
13589 return NULL;
13590 }
13591 return pChannel;
13592}
13593
13594/*===========================================================================
13595 * FUNCTION : getMobicatMask
13596 *
13597 * DESCRIPTION: returns mobicat mask
13598 *
13599 * PARAMETERS : none
13600 *
13601 * RETURN : mobicat mask
13602 *
13603 *==========================================================================*/
13604uint8_t QCamera3HardwareInterface::getMobicatMask()
13605{
13606 return m_MobicatMask;
13607}
13608
13609/*===========================================================================
13610 * FUNCTION : setMobicat
13611 *
13612 * DESCRIPTION: set Mobicat on/off.
13613 *
13614 * PARAMETERS :
13615 * @params : none
13616 *
13617 * RETURN : int32_t type of status
13618 * NO_ERROR -- success
13619 * none-zero failure code
13620 *==========================================================================*/
13621int32_t QCamera3HardwareInterface::setMobicat()
13622{
13623 char value [PROPERTY_VALUE_MAX];
13624 property_get("persist.camera.mobicat", value, "0");
13625 int32_t ret = NO_ERROR;
13626 uint8_t enableMobi = (uint8_t)atoi(value);
13627
13628 if (enableMobi) {
13629 tune_cmd_t tune_cmd;
13630 tune_cmd.type = SET_RELOAD_CHROMATIX;
13631 tune_cmd.module = MODULE_ALL;
13632 tune_cmd.value = TRUE;
13633 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13634 CAM_INTF_PARM_SET_VFE_COMMAND,
13635 tune_cmd);
13636
13637 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13638 CAM_INTF_PARM_SET_PP_COMMAND,
13639 tune_cmd);
13640 }
13641 m_MobicatMask = enableMobi;
13642
13643 return ret;
13644}
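// Illustrative usage (editorial; assumes standard property tooling): Mobicat metadata embedding is
// a debug feature toggled before the camera is opened, e.g.
//
//     adb shell setprop persist.camera.mobicat 1
//
// When enabled, the chromatix-reload tune commands above are batched for both the VFE and the
// postprocessing pipeline, and the mask is later reported through getMobicatMask().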
13645
13646/*===========================================================================
13647* FUNCTION : getLogLevel
13648*
13649* DESCRIPTION: Reads the log level property into a variable
13650*
13651* PARAMETERS :
13652* None
13653*
13654* RETURN :
13655* None
13656*==========================================================================*/
13657void QCamera3HardwareInterface::getLogLevel()
13658{
13659 char prop[PROPERTY_VALUE_MAX];
13660 uint32_t globalLogLevel = 0;
13661
13662 property_get("persist.camera.hal.debug", prop, "0");
13663 int val = atoi(prop);
13664 if (0 <= val) {
13665 gCamHal3LogLevel = (uint32_t)val;
13666 }
13667
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013668 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013669 gKpiDebugLevel = atoi(prop);
13670
13671 property_get("persist.camera.global.debug", prop, "0");
13672 val = atoi(prop);
13673 if (0 <= val) {
13674 globalLogLevel = (uint32_t)val;
13675 }
13676
13677 /* Highest log level among hal.logs and global.logs is selected */
13678 if (gCamHal3LogLevel < globalLogLevel)
13679 gCamHal3LogLevel = globalLogLevel;
13680
13681 return;
13682}
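// Illustrative usage (editorial; assumes standard property and dumpsys tooling): HAL verbosity can
// be raised at runtime without restarting the media server, e.g.
//
//     adb shell setprop persist.camera.hal.debug 4
//     adb shell dumpsys media.camera        # dump() above re-reads the properties via getLogLevel()
//
// The effective level is the higher of persist.camera.hal.debug and persist.camera.global.debug.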
13683
13684/*===========================================================================
13685 * FUNCTION : validateStreamRotations
13686 *
13687 * DESCRIPTION: Check if the rotations requested are supported
13688 *
13689 * PARAMETERS :
13690 * @stream_list : streams to be configured
13691 *
13692 * RETURN : NO_ERROR on success
13693 * -EINVAL on failure
13694 *
13695 *==========================================================================*/
13696int QCamera3HardwareInterface::validateStreamRotations(
13697 camera3_stream_configuration_t *streamList)
13698{
13699 int rc = NO_ERROR;
13700
13701 /*
13702 * Loop through all streams requested in configuration
13703 * Check if unsupported rotations have been requested on any of them
13704 */
13705 for (size_t j = 0; j < streamList->num_streams; j++){
13706 camera3_stream_t *newStream = streamList->streams[j];
13707
Emilian Peev35ceeed2017-06-29 11:58:56 -070013708 switch(newStream->rotation) {
13709 case CAMERA3_STREAM_ROTATION_0:
13710 case CAMERA3_STREAM_ROTATION_90:
13711 case CAMERA3_STREAM_ROTATION_180:
13712 case CAMERA3_STREAM_ROTATION_270:
13713 //Expected values
13714 break;
13715 default:
13716 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13717 "type:%d and stream format:%d", __func__,
13718 newStream->rotation, newStream->stream_type,
13719 newStream->format);
13720 return -EINVAL;
13721 }
13722
Thierry Strudel3d639192016-09-09 11:52:26 -070013723 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13724 bool isImplDef = (newStream->format ==
13725 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13726 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13727 isImplDef);
13728
13729 if (isRotated && (!isImplDef || isZsl)) {
13730 LOGE("Error: Unsupported rotation of %d requested for stream"
13731 "type:%d and stream format:%d",
13732 newStream->rotation, newStream->stream_type,
13733 newStream->format);
13734 rc = -EINVAL;
13735 break;
13736 }
13737 }
13738
13739 return rc;
13740}
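// Worked example of the policy above (editorial): a 90-degree rotation requested on a non-ZSL
// HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream is accepted, whereas the same rotation on
// a BLOB (JPEG) stream, or on a bidirectional ZSL stream, fails the whole configuration with
// -EINVAL, since only non-ZSL implementation-defined streams may be rotated by the HAL.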
13741
13742/*===========================================================================
13743* FUNCTION : getFlashInfo
13744*
13745* DESCRIPTION: Retrieve information about whether the device has a flash.
13746*
13747* PARAMETERS :
13748* @cameraId : Camera id to query
13749* @hasFlash : Boolean indicating whether there is a flash device
13750* associated with given camera
13751* @flashNode : If a flash device exists, this will be its device node.
13752*
13753* RETURN :
13754* None
13755*==========================================================================*/
13756void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13757 bool& hasFlash,
13758 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13759{
13760 cam_capability_t* camCapability = gCamCapability[cameraId];
13761 if (NULL == camCapability) {
13762 hasFlash = false;
13763 flashNode[0] = '\0';
13764 } else {
13765 hasFlash = camCapability->flash_available;
13766 strlcpy(flashNode,
13767 (char*)camCapability->flash_dev_name,
13768 QCAMERA_MAX_FILEPATH_LENGTH);
13769 }
13770}
13771
13772/*===========================================================================
13773* FUNCTION : getEepromVersionInfo
13774*
13775* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13776*
13777* PARAMETERS : None
13778*
13779* RETURN : string describing EEPROM version
13780* "\0" if no such info available
13781*==========================================================================*/
13782const char *QCamera3HardwareInterface::getEepromVersionInfo()
13783{
13784 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13785}
13786
13787/*===========================================================================
13788* FUNCTION : getLdafCalib
13789*
13790* DESCRIPTION: Retrieve Laser AF calibration data
13791*
13792* PARAMETERS : None
13793*
 13794* RETURN : Pointer to two uint32_t values describing laser AF calibration data
 13795* NULL if none is available.
13796*==========================================================================*/
13797const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13798{
13799 if (mLdafCalibExist) {
13800 return &mLdafCalib[0];
13801 } else {
13802 return NULL;
13803 }
13804}
13805
13806/*===========================================================================
13807 * FUNCTION : dynamicUpdateMetaStreamInfo
13808 *
13809 * DESCRIPTION: This function:
13810 * (1) stops all the channels
13811 * (2) returns error on pending requests and buffers
13812 * (3) sends metastream_info in setparams
13813 * (4) starts all channels
13814 * This is useful when sensor has to be restarted to apply any
13815 * settings such as frame rate from a different sensor mode
13816 *
13817 * PARAMETERS : None
13818 *
13819 * RETURN : NO_ERROR on success
13820 * Error codes on failure
13821 *
13822 *==========================================================================*/
13823int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13824{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013825 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013826 int rc = NO_ERROR;
13827
13828 LOGD("E");
13829
13830 rc = stopAllChannels();
13831 if (rc < 0) {
13832 LOGE("stopAllChannels failed");
13833 return rc;
13834 }
13835
13836 rc = notifyErrorForPendingRequests();
13837 if (rc < 0) {
13838 LOGE("notifyErrorForPendingRequests failed");
13839 return rc;
13840 }
13841
13842 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13843 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13844 "Format:%d",
13845 mStreamConfigInfo.type[i],
13846 mStreamConfigInfo.stream_sizes[i].width,
13847 mStreamConfigInfo.stream_sizes[i].height,
13848 mStreamConfigInfo.postprocess_mask[i],
13849 mStreamConfigInfo.format[i]);
13850 }
13851
13852 /* Send meta stream info once again so that ISP can start */
13853 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13854 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13855 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13856 mParameters);
13857 if (rc < 0) {
13858 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13859 }
13860
13861 rc = startAllChannels();
13862 if (rc < 0) {
13863 LOGE("startAllChannels failed");
13864 return rc;
13865 }
13866
13867 LOGD("X");
13868 return rc;
13869}
13870
13871/*===========================================================================
13872 * FUNCTION : stopAllChannels
13873 *
13874 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13875 *
13876 * PARAMETERS : None
13877 *
13878 * RETURN : NO_ERROR on success
13879 * Error codes on failure
13880 *
13881 *==========================================================================*/
13882int32_t QCamera3HardwareInterface::stopAllChannels()
13883{
13884 int32_t rc = NO_ERROR;
13885
13886 LOGD("Stopping all channels");
13887 // Stop the Streams/Channels
13888 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13889 it != mStreamInfo.end(); it++) {
13890 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13891 if (channel) {
13892 channel->stop();
13893 }
13894 (*it)->status = INVALID;
13895 }
13896
13897 if (mSupportChannel) {
13898 mSupportChannel->stop();
13899 }
13900 if (mAnalysisChannel) {
13901 mAnalysisChannel->stop();
13902 }
13903 if (mRawDumpChannel) {
13904 mRawDumpChannel->stop();
13905 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013906 if (mHdrPlusRawSrcChannel) {
13907 mHdrPlusRawSrcChannel->stop();
13908 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013909 if (mMetadataChannel) {
13910 /* If content of mStreamInfo is not 0, there is metadata stream */
13911 mMetadataChannel->stop();
13912 }
13913
13914 LOGD("All channels stopped");
13915 return rc;
13916}
13917
13918/*===========================================================================
13919 * FUNCTION : startAllChannels
13920 *
13921 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13922 *
13923 * PARAMETERS : None
13924 *
13925 * RETURN : NO_ERROR on success
13926 * Error codes on failure
13927 *
13928 *==========================================================================*/
13929int32_t QCamera3HardwareInterface::startAllChannels()
13930{
13931 int32_t rc = NO_ERROR;
13932
13933 LOGD("Start all channels ");
13934 // Start the Streams/Channels
13935 if (mMetadataChannel) {
13936 /* If content of mStreamInfo is not 0, there is metadata stream */
13937 rc = mMetadataChannel->start();
13938 if (rc < 0) {
13939 LOGE("META channel start failed");
13940 return rc;
13941 }
13942 }
13943 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13944 it != mStreamInfo.end(); it++) {
13945 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13946 if (channel) {
13947 rc = channel->start();
13948 if (rc < 0) {
13949 LOGE("channel start failed");
13950 return rc;
13951 }
13952 }
13953 }
13954 if (mAnalysisChannel) {
13955 mAnalysisChannel->start();
13956 }
13957 if (mSupportChannel) {
13958 rc = mSupportChannel->start();
13959 if (rc < 0) {
13960 LOGE("Support channel start failed");
13961 return rc;
13962 }
13963 }
13964 if (mRawDumpChannel) {
13965 rc = mRawDumpChannel->start();
13966 if (rc < 0) {
13967 LOGE("RAW dump channel start failed");
13968 return rc;
13969 }
13970 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013971 if (mHdrPlusRawSrcChannel) {
13972 rc = mHdrPlusRawSrcChannel->start();
13973 if (rc < 0) {
13974 LOGE("HDR+ RAW channel start failed");
13975 return rc;
13976 }
13977 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013978
13979 LOGD("All channels started");
13980 return rc;
13981}
13982
13983/*===========================================================================
13984 * FUNCTION : notifyErrorForPendingRequests
13985 *
13986 * DESCRIPTION: This function sends error for all the pending requests/buffers
13987 *
13988 * PARAMETERS : None
13989 *
13990 * RETURN : Error codes
13991 * NO_ERROR on success
13992 *
13993 *==========================================================================*/
13994int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13995{
Emilian Peev7650c122017-01-19 08:24:33 -080013996 notifyErrorFoPendingDepthData(mDepthChannel);
13997
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013998 auto pendingRequest = mPendingRequestsList.begin();
13999 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014000
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014001 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14002 // buffers (for which buffers aren't sent yet).
14003 while (pendingRequest != mPendingRequestsList.end() ||
14004 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14005 if (pendingRequest == mPendingRequestsList.end() ||
14006 pendingBuffer->frame_number < pendingRequest->frame_number) {
 14007            // If metadata for this frame was already sent, notify about a buffer error and
 14008            // return the buffers with error status.
14009 for (auto &info : pendingBuffer->mPendingBufferList) {
14010 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014011 camera3_notify_msg_t notify_msg;
14012 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14013 notify_msg.type = CAMERA3_MSG_ERROR;
14014 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014015 notify_msg.message.error.error_stream = info.stream;
14016 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014017 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014018
14019 camera3_stream_buffer_t buffer = {};
14020 buffer.acquire_fence = -1;
14021 buffer.release_fence = -1;
14022 buffer.buffer = info.buffer;
14023 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14024 buffer.stream = info.stream;
14025 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014026 }
14027
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014028 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14029 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14030 pendingBuffer->frame_number > pendingRequest->frame_number) {
14031 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014032 camera3_notify_msg_t notify_msg;
14033 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14034 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014035 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14036 notify_msg.message.error.error_stream = nullptr;
14037 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014038 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014039
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014040 if (pendingRequest->input_buffer != nullptr) {
14041 camera3_capture_result result = {};
14042 result.frame_number = pendingRequest->frame_number;
14043 result.result = nullptr;
14044 result.input_buffer = pendingRequest->input_buffer;
14045 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014046 }
14047
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014048 mShutterDispatcher.clear(pendingRequest->frame_number);
14049 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14050 } else {
14051 // If both buffers and result metadata weren't sent yet, notify about a request error
14052 // and return buffers with error.
14053 for (auto &info : pendingBuffer->mPendingBufferList) {
14054 camera3_notify_msg_t notify_msg;
14055 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14056 notify_msg.type = CAMERA3_MSG_ERROR;
14057 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14058 notify_msg.message.error.error_stream = info.stream;
14059 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14060 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014061
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014062 camera3_stream_buffer_t buffer = {};
14063 buffer.acquire_fence = -1;
14064 buffer.release_fence = -1;
14065 buffer.buffer = info.buffer;
14066 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14067 buffer.stream = info.stream;
14068 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14069 }
14070
14071 if (pendingRequest->input_buffer != nullptr) {
14072 camera3_capture_result result = {};
14073 result.frame_number = pendingRequest->frame_number;
14074 result.result = nullptr;
14075 result.input_buffer = pendingRequest->input_buffer;
14076 orchestrateResult(&result);
14077 }
14078
14079 mShutterDispatcher.clear(pendingRequest->frame_number);
14080 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14081 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014082 }
14083 }
14084
14085 /* Reset pending frame Drop list and requests list */
14086 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014087 mShutterDispatcher.clear();
14088 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014089 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014090 LOGH("Cleared all the pending buffers ");
14091
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014092 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014093}
14094
14095bool QCamera3HardwareInterface::isOnEncoder(
14096 const cam_dimension_t max_viewfinder_size,
14097 uint32_t width, uint32_t height)
14098{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014099 return ((width > (uint32_t)max_viewfinder_size.width) ||
14100 (height > (uint32_t)max_viewfinder_size.height) ||
14101 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14102 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014103}
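// Worked example (numbers are illustrative): with a max_viewfinder_size of 1920x1080, a 3264x2448
// snapshot stream returns true (encoder path) while a 1280x720 preview stream returns false; any
// stream wider than 3840 or taller than 2160 is forced onto the encoder path regardless of the
// viewfinder limit.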
14104
14105/*===========================================================================
14106 * FUNCTION : setBundleInfo
14107 *
14108 * DESCRIPTION: Set bundle info for all streams that are bundle.
14109 *
14110 * PARAMETERS : None
14111 *
14112 * RETURN : NO_ERROR on success
14113 * Error codes on failure
14114 *==========================================================================*/
14115int32_t QCamera3HardwareInterface::setBundleInfo()
14116{
14117 int32_t rc = NO_ERROR;
14118
14119 if (mChannelHandle) {
14120 cam_bundle_config_t bundleInfo;
14121 memset(&bundleInfo, 0, sizeof(bundleInfo));
14122 rc = mCameraHandle->ops->get_bundle_info(
14123 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14124 if (rc != NO_ERROR) {
14125 LOGE("get_bundle_info failed");
14126 return rc;
14127 }
14128 if (mAnalysisChannel) {
14129 mAnalysisChannel->setBundleInfo(bundleInfo);
14130 }
14131 if (mSupportChannel) {
14132 mSupportChannel->setBundleInfo(bundleInfo);
14133 }
14134 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14135 it != mStreamInfo.end(); it++) {
14136 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14137 channel->setBundleInfo(bundleInfo);
14138 }
14139 if (mRawDumpChannel) {
14140 mRawDumpChannel->setBundleInfo(bundleInfo);
14141 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014142 if (mHdrPlusRawSrcChannel) {
14143 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14144 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014145 }
14146
14147 return rc;
14148}
14149
14150/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014151 * FUNCTION : setInstantAEC
14152 *
14153 * DESCRIPTION: Set Instant AEC related params.
14154 *
14155 * PARAMETERS :
14156 * @meta: CameraMetadata reference
14157 *
14158 * RETURN : NO_ERROR on success
14159 * Error codes on failure
14160 *==========================================================================*/
14161int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14162{
14163 int32_t rc = NO_ERROR;
14164 uint8_t val = 0;
14165 char prop[PROPERTY_VALUE_MAX];
14166
14167 // First try to configure instant AEC from framework metadata
14168 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14169 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14170 }
14171
14172 // If framework did not set this value, try to read from set prop.
14173 if (val == 0) {
14174 memset(prop, 0, sizeof(prop));
14175 property_get("persist.camera.instant.aec", prop, "0");
14176 val = (uint8_t)atoi(prop);
14177 }
14178
14179 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14180 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14181 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14182 mInstantAEC = val;
14183 mInstantAECSettledFrameNumber = 0;
14184 mInstantAecFrameIdxCount = 0;
14185 LOGH("instantAEC value set %d",val);
14186 if (mInstantAEC) {
14187 memset(prop, 0, sizeof(prop));
14188 property_get("persist.camera.ae.instant.bound", prop, "10");
14189 int32_t aec_frame_skip_cnt = atoi(prop);
14190 if (aec_frame_skip_cnt >= 0) {
14191 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14192 } else {
14193 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14194 rc = BAD_VALUE;
14195 }
14196 }
14197 } else {
14198 LOGE("Bad instant aec value set %d", val);
14199 rc = BAD_VALUE;
14200 }
14201 return rc;
14202}
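// Illustrative override (editorial; exact convergence enum values are defined in cam_types.h): when
// the framework does not populate QCAMERA3_INSTANT_AEC_MODE, instant AEC can still be forced
// through the fallback properties read above, e.g.
//
//     adb shell setprop persist.camera.instant.aec 1          # select an instant-AEC convergence mode
//     adb shell setprop persist.camera.ae.instant.bound 10    # max frames skipped while AEC settles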
14203
14204/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014205 * FUNCTION : get_num_overall_buffers
14206 *
14207 * DESCRIPTION: Estimate number of pending buffers across all requests.
14208 *
14209 * PARAMETERS : None
14210 *
14211 * RETURN : Number of overall pending buffers
14212 *
14213 *==========================================================================*/
14214uint32_t PendingBuffersMap::get_num_overall_buffers()
14215{
14216 uint32_t sum_buffers = 0;
14217 for (auto &req : mPendingBuffersInRequest) {
14218 sum_buffers += req.mPendingBufferList.size();
14219 }
14220 return sum_buffers;
14221}
14222
14223/*===========================================================================
14224 * FUNCTION : removeBuf
14225 *
14226 * DESCRIPTION: Remove a matching buffer from tracker.
14227 *
14228 * PARAMETERS : @buffer: image buffer for the callback
14229 *
14230 * RETURN : None
14231 *
14232 *==========================================================================*/
14233void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14234{
14235 bool buffer_found = false;
14236 for (auto req = mPendingBuffersInRequest.begin();
14237 req != mPendingBuffersInRequest.end(); req++) {
14238 for (auto k = req->mPendingBufferList.begin();
14239 k != req->mPendingBufferList.end(); k++ ) {
14240 if (k->buffer == buffer) {
14241 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14242 req->frame_number, buffer);
14243 k = req->mPendingBufferList.erase(k);
14244 if (req->mPendingBufferList.empty()) {
14245 // Remove this request from Map
14246 req = mPendingBuffersInRequest.erase(req);
14247 }
14248 buffer_found = true;
14249 break;
14250 }
14251 }
14252 if (buffer_found) {
14253 break;
14254 }
14255 }
14256 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14257 get_num_overall_buffers());
14258}
14259
14260/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014261 * FUNCTION : getBufErrStatus
14262 *
14263 * DESCRIPTION: get buffer error status
14264 *
14265 * PARAMETERS : @buffer: buffer handle
14266 *
14267 * RETURN : Error status
14268 *
14269 *==========================================================================*/
14270int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14271{
14272 for (auto& req : mPendingBuffersInRequest) {
14273 for (auto& k : req.mPendingBufferList) {
14274 if (k.buffer == buffer)
14275 return k.bufStatus;
14276 }
14277 }
14278 return CAMERA3_BUFFER_STATUS_OK;
14279}
14280
14281/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014282 * FUNCTION : setPAAFSupport
14283 *
14284 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14285 * feature mask according to stream type and filter
14286 * arrangement
14287 *
14288 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14289 * @stream_type: stream type
14290 * @filter_arrangement: filter arrangement
14291 *
14292 * RETURN : None
14293 *==========================================================================*/
14294void QCamera3HardwareInterface::setPAAFSupport(
14295 cam_feature_mask_t& feature_mask,
14296 cam_stream_type_t stream_type,
14297 cam_color_filter_arrangement_t filter_arrangement)
14298{
Thierry Strudel3d639192016-09-09 11:52:26 -070014299 switch (filter_arrangement) {
14300 case CAM_FILTER_ARRANGEMENT_RGGB:
14301 case CAM_FILTER_ARRANGEMENT_GRBG:
14302 case CAM_FILTER_ARRANGEMENT_GBRG:
14303 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014304 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14305 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014306 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014307 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14308 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014309 }
14310 break;
14311 case CAM_FILTER_ARRANGEMENT_Y:
14312 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14313 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14314 }
14315 break;
14316 default:
14317 break;
14318 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014319 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14320 feature_mask, stream_type, filter_arrangement);
14321
14322
Thierry Strudel3d639192016-09-09 11:52:26 -070014323}
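// Worked example of the rule above (editorial): on a Bayer sensor (e.g. RGGB), preview, analysis
// and video streams get CAM_QCOM_FEATURE_PAAF added unless CAM_QTI_FEATURE_PPEISCORE is already
// selected for that stream; on a mono (Y-only) sensor, only the analysis stream carries PAAF.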
14324
14325/*===========================================================================
14326* FUNCTION : getSensorMountAngle
14327*
14328* DESCRIPTION: Retrieve sensor mount angle
14329*
14330* PARAMETERS : None
14331*
14332* RETURN : sensor mount angle in uint32_t
14333*==========================================================================*/
14334uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14335{
14336 return gCamCapability[mCameraId]->sensor_mount_angle;
14337}
14338
14339/*===========================================================================
14340* FUNCTION : getRelatedCalibrationData
14341*
14342* DESCRIPTION: Retrieve related system calibration data
14343*
14344* PARAMETERS : None
14345*
14346* RETURN : Pointer of related system calibration data
14347*==========================================================================*/
14348const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14349{
14350 return (const cam_related_system_calibration_data_t *)
14351 &(gCamCapability[mCameraId]->related_cam_calibration);
14352}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014353
14354/*===========================================================================
14355 * FUNCTION : is60HzZone
14356 *
 14357 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14358 *
14359 * PARAMETERS : None
14360 *
14361 * RETURN : True if in 60Hz zone, False otherwise
14362 *==========================================================================*/
14363bool QCamera3HardwareInterface::is60HzZone()
14364{
14365 time_t t = time(NULL);
14366 struct tm lt;
14367
14368 struct tm* r = localtime_r(&t, &lt);
14369
14370 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14371 return true;
14372 else
14373 return false;
14374}
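// Worked example of the heuristic above (editorial): the local UTC offset is used as a proxy for
// mains frequency. An offset of -5h (e.g. US East Coast) or +9h (e.g. Japan) returns true (60Hz),
// while +1h (central Europe) or +5.5h (India) falls inside the open (-2h, +8h) band and returns
// false (assumed 50Hz). If localtime_r() fails, the code conservatively reports 60Hz.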
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014375
14376/*===========================================================================
14377 * FUNCTION : adjustBlackLevelForCFA
14378 *
14379 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14380 * of bayer CFA (Color Filter Array).
14381 *
14382 * PARAMETERS : @input: black level pattern in the order of RGGB
14383 * @output: black level pattern in the order of CFA
14384 * @color_arrangement: CFA color arrangement
14385 *
14386 * RETURN : None
14387 *==========================================================================*/
14388template<typename T>
14389void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14390 T input[BLACK_LEVEL_PATTERN_CNT],
14391 T output[BLACK_LEVEL_PATTERN_CNT],
14392 cam_color_filter_arrangement_t color_arrangement)
14393{
14394 switch (color_arrangement) {
14395 case CAM_FILTER_ARRANGEMENT_GRBG:
14396 output[0] = input[1];
14397 output[1] = input[0];
14398 output[2] = input[3];
14399 output[3] = input[2];
14400 break;
14401 case CAM_FILTER_ARRANGEMENT_GBRG:
14402 output[0] = input[2];
14403 output[1] = input[3];
14404 output[2] = input[0];
14405 output[3] = input[1];
14406 break;
14407 case CAM_FILTER_ARRANGEMENT_BGGR:
14408 output[0] = input[3];
14409 output[1] = input[2];
14410 output[2] = input[1];
14411 output[3] = input[0];
14412 break;
14413 case CAM_FILTER_ARRANGEMENT_RGGB:
14414 output[0] = input[0];
14415 output[1] = input[1];
14416 output[2] = input[2];
14417 output[3] = input[3];
14418 break;
14419 default:
14420 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14421 break;
14422 }
14423}
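// Worked example (values are illustrative): with input = {R, Gr, Gb, B} = {64, 65, 66, 67} and
// color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the output becomes {65, 64, 67, 66}, i.e. the
// RGGB-ordered black level pattern reshuffled to match the sensor's CFA readout order.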
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014424
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

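/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Try to submit a capture request to the HDR+ service. The
 *              request qualifies only if noise reduction and edge modes are
 *              HIGH_QUALITY and the single output buffer is a JPEG stream.
 *
 * PARAMETERS : @hdrPlusRequest: filled with the pending HDR+ request on success
 *              @request: the framework capture request
 *              @metadata: the framework capture request settings
 *
 * RETURN     : True if the request was submitted as an HDR+ request,
 *              False otherwise
 *==========================================================================*/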
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
                ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    // Check edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.dmaBufFd = yuvBuffer->fd;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}

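/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Start opening an HDR+ client asynchronously via the Easel
 *              manager client, if one is not already open or being opened.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success, an error status otherwise
 *==========================================================================*/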
status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

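/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable HDR+ mode. If the HDR+ client is not opened yet, this
 *              kicks off an asynchronous open and HDR+ mode is enabled once
 *              the client is opened. Otherwise, HDR+ streams are configured
 *              and ZSL HDR+ mode is enabled on the client.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success, an error status otherwise
 *==========================================================================*/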
status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure streams for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

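/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable HDR+ mode and close the HDR+ client so Easel can
 *              enter low power mode.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/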
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

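/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure HDR+ client streams. The input is either the HAL
 *              RAW10 source channel or the sensor MIPI (sent directly to
 *              Easel), and the output is the YUV buffer of the pic channel.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success, an error status otherwise
 *==========================================================================*/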
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

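/*===========================================================================
 * FUNCTION   : onEaselFatalError
 *
 * DESCRIPTION: Easel manager callback invoked on an Easel fatal error. Moves
 *              the HAL to the error state and notifies the framework of the
 *              device error.
 *
 * PARAMETERS : @errMsg: description of the fatal error
 *
 * RETURN     : None
 *==========================================================================*/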
void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

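/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: HDR+ client callback invoked when the client has been opened
 *              asynchronously. Stores the client, pushes the static metadata
 *              to it, and enables HDR+ mode.
 *
 * PARAMETERS : @client: the opened HDR+ client
 *
 * RETURN     : None
 *==========================================================================*/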
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

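/*===========================================================================
 * FUNCTION   : onOpenFailed
 *
 * DESCRIPTION: HDR+ client callback invoked when opening the client failed.
 *
 * PARAMETERS : @err: status of the failed open
 *
 * RETURN     : None
 *==========================================================================*/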
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

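/*===========================================================================
 * FUNCTION   : onFatalError
 *
 * DESCRIPTION: HDR+ client callback invoked on a fatal client error. Moves
 *              the HAL to the error state and notifies the framework of the
 *              device error.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/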
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

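/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture result is
 *              ready. Updates the result metadata with the original request
 *              settings, hands the YUV buffer back to the pic channel for
 *              JPEG encoding, dispatches the shutter, and sends the result
 *              metadata to the framework.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: result metadata of the ZSL input buffer
 *
 * RETURN     : None
 *==========================================================================*/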
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request for request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            //       service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Find the timestamp.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

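/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture request
 *              failed. Returns the YUV buffer to the pic channel, sends
 *              buffer error notifications for the pending buffers of the
 *              frame, and removes the pending request.
 *
 * PARAMETERS : @failedResult: the failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/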
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request and return its YUV buffer to the pic channel.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending HDR+ request for request %d.", __FUNCTION__,
                    failedResult->requestId);
        } else {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel =
                    (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}


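/*===========================================================================
 * ShutterDispatcher
 *
 * Dispatches shutter notifications to the framework in frame-number order.
 * Shutters are registered with expectShutter(), marked ready with a
 * timestamp via markShutterReady(), and sent out once all earlier shutters
 * are ready. Regular and reprocess shutters are tracked separately.
 *==========================================================================*/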
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
            __FUNCTION__, shutter.first, shutter.second.ready,
            shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
            __FUNCTION__, shutter.first, shutter.second.ready,
            shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

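/*===========================================================================
 * OutputBufferDispatcher
 *
 * Dispatches output buffers to the framework in frame-number order, per
 * stream. Buffers are registered with expectBuffer(), marked ready via
 * markBufferReady(), and sent out for a stream once all buffers of earlier
 * frame numbers in that stream are ready.
 *==========================================================================*/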
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera